summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author陈冠林 <[email protected]>2019-06-18 12:02:26 +0800
committer陈冠林 <[email protected]>2019-06-18 12:02:26 +0800
commitfc07ba206f7a77ac013a1af5c2736ff21a53ca24 (patch)
tree024432426c970ab593751795b20b7610561cda49
最终版论文
-rw-r--r--Biblio/gbt7714-plain.bst2017
-rw-r--r--Biblio/gbt7714-unsrt.bst1874
-rw-r--r--Biblio/library.bib599
-rw-r--r--Biblio/ref.bib165
-rw-r--r--Img/80sizedistribution.pngbin0 -> 113996 bytes
-rw-r--r--Img/Allaudio.pngbin0 -> 110338 bytes
-rw-r--r--Img/Content-based.pngbin0 -> 45871 bytes
-rw-r--r--Img/Flowchartofpredictiveprogram.pngbin0 -> 23821 bytes
-rw-r--r--Img/Flowchartofrequest_ag.pngbin0 -> 44758 bytes
-rw-r--r--Img/Flowchartofrequest_fi.pngbin0 -> 26554 bytes
-rw-r--r--Img/Frameworkforfeature.jpgbin0 -> 19213 bytes
-rw-r--r--Img/Mobiledevice1.pngbin0 -> 80211 bytes
-rw-r--r--Img/Mobiledevice2.pngbin0 -> 72735 bytes
-rw-r--r--Img/Multi-threaded.jpgbin0 -> 15456 bytes
-rw-r--r--Img/PACKalgorithm_re.jpgbin0 -> 30625 bytes
-rw-r--r--Img/PACKalgorithm_se.jpgbin0 -> 34481 bytes
-rw-r--r--Img/PACKstructure.jpgbin0 -> 32982 bytes
-rw-r--r--Img/SFH.jpgbin0 -> 31205 bytes
-rw-r--r--Img/Schematicdiagram.jpgbin0 -> 39503 bytes
-rw-r--r--Img/Systemstructure.jpgbin0 -> 17625 bytes
-rw-r--r--Img/Typicaldecision.jpgbin0 -> 39811 bytes
-rw-r--r--Img/handleout-of-order.jpgbin0 -> 26834 bytes
-rw-r--r--Img/numberflow.pngbin0 -> 24598 bytes
-rw-r--r--Img/oaspl_a.pdfbin0 -> 12956 bytes
-rw-r--r--Img/oaspl_b.pdfbin0 -> 12995 bytes
-rw-r--r--Img/oaspl_c.pdfbin0 -> 12985 bytes
-rw-r--r--Img/oaspl_d.pdfbin0 -> 12975 bytes
-rw-r--r--Img/result_a.pngbin0 -> 101760 bytes
-rw-r--r--Img/result_b.pngbin0 -> 94013 bytes
-rw-r--r--Img/result_c.pngbin0 -> 114288 bytes
-rw-r--r--Img/result_d.pngbin0 -> 106770 bytes
-rw-r--r--Img/route.jpgbin0 -> 78133 bytes
-rw-r--r--Img/shock_cyn.jpgbin0 -> 114273 bytes
-rw-r--r--Img/systemframework.pngbin0 -> 25250 bytes
-rw-r--r--Img/tc_q_criteria.jpgbin0 -> 1382852 bytes
-rw-r--r--Img/test.pngbin0 -> 18501 bytes
-rw-r--r--Img/ucas_logo.pdfbin0 -> 41146 bytes
-rw-r--r--Img/youtube.pngbin0 -> 91555 bytes
-rw-r--r--README.md55
-rw-r--r--Style/artracom.sty94
-rw-r--r--Style/artratex.sty660
-rw-r--r--Style/ucasthesis.cfg105
-rw-r--r--Style/ucasthesis.cls546
-rw-r--r--Tex/Appendix.tex24
-rw-r--r--Tex/Backmatter.tex40
-rw-r--r--Tex/Chap_1.tex208
-rw-r--r--Tex/Chap_2.tex127
-rw-r--r--Tex/Chap_3.tex310
-rw-r--r--Tex/Chap_4.tex145
-rw-r--r--Tex/Chap_5.tex87
-rw-r--r--Tex/Chap_6.tex42
-rw-r--r--Tex/Chap_Guide.tex342
-rw-r--r--Tex/Frontmatter.tex77
-rw-r--r--Tex/Mainmatter.tex10
-rw-r--r--Tex/Prematter.tex32
-rw-r--r--Thesis.log1185
-rw-r--r--Thesis.synctex(busy)0
-rw-r--r--Thesis.tex103
-rw-r--r--Tmp/Thesis.aux262
-rw-r--r--Tmp/Thesis.bbl401
-rw-r--r--Tmp/Thesis.blg82
-rw-r--r--Tmp/Thesis.lof25
-rw-r--r--Tmp/Thesis.log2167
-rw-r--r--Tmp/Thesis.lot20
-rw-r--r--Tmp/Thesis.out66
-rw-r--r--Tmp/Thesis.toc66
-rw-r--r--Tmp/陈冠林-硕士学位论文.pdfbin0 -> 1585584 bytes
-rw-r--r--Tmp/陈冠林-硕士学位论文v1.1.pdfbin0 -> 1665868 bytes
-rw-r--r--Tmp/陈冠林-硕士毕业论文.pdfbin0 -> 1273558 bytes
-rw-r--r--Tmp/陈冠林-硕士毕业论文v1.2.pdfbin0 -> 1626584 bytes
-rw-r--r--Tmp/陈冠林-硕士毕业论文v1.3.pdfbin0 -> 1371477 bytes
-rw-r--r--Tmp/陈冠林-硕士毕业论文v1.4.pdfbin0 -> 1293323 bytes
-rw-r--r--Tmp/陈冠林-硕士毕业论文v1.5.pdfbin0 -> 1302977 bytes
-rw-r--r--artratex.bat56
-rw-r--r--artratex.sh110
-rw-r--r--模板使用说明.pdfbin0 -> 2542647 bytes
76 files changed, 12102 insertions, 0 deletions
diff --git a/Biblio/gbt7714-plain.bst b/Biblio/gbt7714-plain.bst
new file mode 100644
index 0000000..038e841
--- /dev/null
+++ b/Biblio/gbt7714-plain.bst
@@ -0,0 +1,2017 @@
+%%
+%% This is file `gbt7714-plain.bst',
+%% generated with the docstrip utility.
+%%
+%% The original source files were:
+%%
+%% gbt7714.dtx (with options: `2015,authoryear')
+%% -------------------------------------------------------------------
+%% GB/T 7714-2015 BibTeX Style
+%% https://github.com/CTeX-org/gbt7714-bibtex-style
+%% Version: 2019/01/02 v1.1
+%% -------------------------------------------------------------------
+%% Copyright (C) 2016-2019 by Zeping Lee <zepinglee AT gmail.com>
+%% -------------------------------------------------------------------
+%% This file may be distributed and/or modified under the
+%% conditions of the LaTeX Project Public License, either version 1.3c
+%% of this license or (at your option) any later version.
+%% The latest version of this license is in
+%% https://www.latex-project.org/lppl.txt
+%% and version 1.3c or later is part of all distributions of LaTeX
+%% version 2005/12/01 or later.
+%% -------------------------------------------------------------------
+
+INTEGERS {
+ uppercase.name
+ max.num.authors
+ period.between.author.year
+ sentence.case.title
+ link.title
+ show.mark
+ slash.for.extraction
+ in.booktitle
+ italic.jounal
+ bold.journal.volume
+ show.missing.address.publisher
+ show.url
+ show.doi
+ show.note
+}
+
+FUNCTION {load.config}
+{
+ #1 'uppercase.name :=
+ #3 'max.num.authors :=
+ #0 'period.between.author.year :=
+ #1 'sentence.case.title :=
+ #0 'link.title :=
+ #1 'show.mark :=
+ #1 'slash.for.extraction :=
+ #0 'in.booktitle :=
+ #0 'italic.jounal :=
+ #0 'bold.journal.volume :=
+ #0 'show.missing.address.publisher :=
+ #1 'show.url :=
+ #1 'show.doi :=
+ #0 'show.note :=
+}
+
+ENTRY
+ { address
+ author
+ booktitle
+ date
+ doi
+ edition
+ editor
+ howpublished
+ institution
+ journal
+ key
+ language
+ mark
+ medium
+ note
+ number
+ organization
+ pages
+ publisher
+ school
+ series
+ title
+ translator
+ url
+ urldate
+ volume
+ year
+ }
+ { entry.lang entry.is.electronic entry.numbered }
+ { label extra.label sort.label short.list entry.mark entry.url }
+
+INTEGERS { output.state before.all mid.sentence after.sentence after.block after.slash }
+
+INTEGERS { lang.zh lang.ja lang.en lang.ru lang.other }
+
+INTEGERS { charptr len }
+
+FUNCTION {init.state.consts}
+{ #0 'before.all :=
+ #1 'mid.sentence :=
+ #2 'after.sentence :=
+ #3 'after.block :=
+ #4 'after.slash :=
+ #3 'lang.zh :=
+ #4 'lang.ja :=
+ #1 'lang.en :=
+ #2 'lang.ru :=
+ #0 'lang.other :=
+}
+
+FUNCTION {bbl.anonymous}
+{ entry.lang lang.zh =
+ { "佚名" }
+ { "Anon" }
+ if$
+}
+
+FUNCTION {bbl.space}
+{ entry.lang lang.zh =
+ { "\ " }
+ { " " }
+ if$
+}
+
+FUNCTION {bbl.et.al}
+{ entry.lang lang.zh =
+ { "等" }
+ { entry.lang lang.ja =
+ { "他" }
+ { entry.lang lang.ru =
+ { "идр" }
+ { "et~al." }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {citation.et.al}
+{ entry.lang lang.zh =
+ { "等" }
+ { "{\bibetal}" }
+ if$
+}
+
+FUNCTION {citation.and} { "{\biband}" }
+
+FUNCTION {bbl.colon} { ": " }
+
+FUNCTION {bbl.wide.space} { "\quad " }
+
+FUNCTION {bbl.slash} { "//\allowbreak " }
+
+FUNCTION {bbl.sine.loco}
+{ entry.lang lang.zh =
+ { "[出版地不详]" }
+ { "[S.l.]" }
+ if$
+}
+
+FUNCTION {bbl.sine.nomine}
+{ entry.lang lang.zh =
+ { "[出版者不详]" }
+ { "[s.n.]" }
+ if$
+}
+
+FUNCTION {bbl.sine.loco.sine.nomine}
+{ entry.lang lang.zh =
+ { "[出版地不详: 出版者不详]" }
+ { "[S.l.: s.n.]" }
+ if$
+}
+
+FUNCTION {not}
+{ { #0 }
+ { #1 }
+ if$
+}
+
+FUNCTION {and}
+{ 'skip$
+ { pop$ #0 }
+ if$
+}
+
+FUNCTION {or}
+{ { pop$ #1 }
+ 'skip$
+ if$
+}
+
+STRINGS { s t }
+
+FUNCTION {output.nonnull}
+{ 's :=
+ output.state mid.sentence =
+ { ", " * write$ }
+ { output.state after.block =
+ { add.period$ write$
+ newline$
+ "\newblock " write$
+ }
+ { output.state before.all =
+ 'write$
+ { output.state after.slash =
+ { bbl.slash * write$
+ newline$
+ }
+ { add.period$ " " * write$ }
+ if$
+ }
+ if$
+ }
+ if$
+ mid.sentence 'output.state :=
+ }
+ if$
+ s
+}
+
+FUNCTION {output}
+{ duplicate$ empty$
+ 'pop$
+ 'output.nonnull
+ if$
+}
+
+FUNCTION {output.after}
+{ 't :=
+ duplicate$ empty$
+ 'pop$
+ { 's :=
+ output.state mid.sentence =
+ { t * write$ }
+ { output.state after.block =
+ { add.period$ write$
+ newline$
+ "\newblock " write$
+ }
+ { output.state before.all =
+ 'write$
+ { output.state after.slash =
+ { bbl.slash * write$ }
+ { add.period$ " " * write$ }
+ if$
+ }
+ if$
+ }
+ if$
+ mid.sentence 'output.state :=
+ }
+ if$
+ s
+ }
+ if$
+}
+
+FUNCTION {output.check}
+{ 't :=
+ duplicate$ empty$
+ { pop$ "empty " t * " in " * cite$ * warning$ }
+ 'output.nonnull
+ if$
+}
+
+FUNCTION {fin.entry}
+{ add.period$
+ write$
+ newline$
+}
+
+FUNCTION {new.block}
+{ output.state before.all =
+ 'skip$
+ { output.state after.slash =
+ 'skip$
+ { after.block 'output.state := }
+ if$
+ }
+ if$
+}
+
+FUNCTION {new.sentence}
+{ output.state after.block =
+ 'skip$
+ { output.state before.all =
+ 'skip$
+ { output.state after.slash =
+ 'skip$
+ { after.sentence 'output.state := }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {new.slash}
+{ output.state before.all =
+ 'skip$
+ { slash.for.extraction
+ { after.slash 'output.state := }
+ { after.block 'output.state := }
+ if$
+ }
+ if$
+}
+
+FUNCTION {new.block.checka}
+{ empty$
+ 'skip$
+ 'new.block
+ if$
+}
+
+FUNCTION {new.block.checkb}
+{ empty$
+ swap$ empty$
+ and
+ 'skip$
+ 'new.block
+ if$
+}
+
+FUNCTION {new.sentence.checka}
+{ empty$
+ 'skip$
+ 'new.sentence
+ if$
+}
+
+FUNCTION {new.sentence.checkb}
+{ empty$
+ swap$ empty$
+ and
+ 'skip$
+ 'new.sentence
+ if$
+}
+
+FUNCTION {field.or.null}
+{ duplicate$ empty$
+ { pop$ "" }
+ 'skip$
+ if$
+}
+
+FUNCTION {italicize}
+{ duplicate$ empty$
+ { pop$ "" }
+ { "\textit{" swap$ * "}" * }
+ if$
+}
+
+INTEGERS { byte second.byte }
+
+INTEGERS { char.lang tmp.lang }
+
+STRINGS { tmp.str }
+
+FUNCTION {get.str.lang}
+{ 'tmp.str :=
+ lang.other 'tmp.lang :=
+ #1 'charptr :=
+ tmp.str text.length$ #1 + 'len :=
+ { charptr len < }
+ { tmp.str charptr #1 substring$ chr.to.int$ 'byte :=
+ byte #128 <
+ { charptr #1 + 'charptr :=
+ byte #64 > byte #91 < and byte #96 > byte #123 < and or
+ { lang.en 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ { tmp.str charptr #1 + #1 substring$ chr.to.int$ 'second.byte :=
+ byte #224 <
+ { charptr #2 + 'charptr :=
+ byte #207 > byte #212 < and
+ byte #212 = second.byte #176 < and or
+ { lang.ru 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ { byte #240 <
+ { charptr #3 + 'charptr :=
+ byte #227 > byte #234 < and
+ { lang.zh 'char.lang := }
+ { byte #227 =
+ { second.byte #143 >
+ { lang.zh 'char.lang := }
+ { second.byte #128 > second.byte #132 < and
+ { lang.ja 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ if$
+ }
+ { byte #239 =
+ second.byte #163 > second.byte #172 < and and
+ { lang.zh 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ { charptr #4 + 'charptr :=
+ byte #240 = second.byte #159 > and
+ { lang.zh 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ char.lang tmp.lang >
+ { char.lang 'tmp.lang := }
+ 'skip$
+ if$
+ }
+ while$
+ tmp.lang
+}
+
+FUNCTION {check.entry.lang}
+{ author field.or.null
+ title field.or.null *
+ get.str.lang
+}
+
+FUNCTION {set.entry.lang}
+{ language empty$
+ { check.entry.lang }
+ { language "english" = language "american" = or language "british" = or
+ { lang.en }
+ { language "chinese" =
+ { lang.zh }
+ { language "japanese" =
+ { lang.ja }
+ { language "russian" =
+ { lang.ru }
+ { check.entry.lang }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ 'entry.lang :=
+}
+
+FUNCTION {set.entry.numbered}
+{ type$ "patent" =
+ type$ "standard" = or
+ type$ "techreport" = or
+ { #1 'entry.numbered := }
+ { #0 'entry.numbered := }
+ if$
+}
+
+INTEGERS { nameptr namesleft numnames name.lang }
+
+FUNCTION {format.names}
+{ 's :=
+ #1 'nameptr :=
+ s num.names$ 'numnames :=
+ numnames 'namesleft :=
+ { namesleft #0 > }
+ { s nameptr "{vv~}{ll}{, jj}{, ff}" format.name$ 't :=
+ nameptr max.num.authors >
+ { bbl.et.al
+ #1 'namesleft :=
+ }
+ { t "others" =
+ { bbl.et.al }
+ { t get.str.lang 'name.lang :=
+ name.lang lang.en =
+ { t #1 "{vv~}{ll}{~f{~}}" format.name$
+ uppercase.name
+ { "u" change.case$ }
+ 'skip$
+ if$
+ t #1 "{, jj}" format.name$ *
+ }
+ { t #1 "{ll}{ff}" format.name$ }
+ if$
+ }
+ if$
+ }
+ if$
+ nameptr #1 >
+ { ", " swap$ * * }
+ 'skip$
+ if$
+ nameptr #1 + 'nameptr :=
+ namesleft #1 - 'namesleft :=
+ }
+ while$
+}
+
+FUNCTION {format.key}
+{ empty$
+ { key field.or.null }
+ { "" }
+ if$
+}
+
+FUNCTION {format.authors}
+{ author empty$ not
+ { author format.names }
+ { "empty author in " cite$ * warning$
+ bbl.anonymous
+ }
+ if$
+}
+
+FUNCTION {format.editors}
+{ editor empty$
+ { "" }
+ { editor format.names }
+ if$
+}
+
+FUNCTION {format.translators}
+{ translator empty$
+ { "" }
+ { translator format.names
+ entry.lang lang.zh =
+ { translator num.names$ #3 >
+ { "译" * }
+ { ", 译" * }
+ if$
+ }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.full.names}
+{'s :=
+ #1 'nameptr :=
+ s num.names$ 'numnames :=
+ numnames 'namesleft :=
+ { namesleft #0 > }
+ { s nameptr "{vv~}{ll}{, jj}{, ff}" format.name$ 't :=
+ t get.str.lang 'name.lang :=
+ name.lang lang.en =
+ { t #1 "{vv~}{ll}" format.name$ 't := }
+ { t #1 "{ll}{ff}" format.name$ 't := }
+ if$
+ nameptr #1 >
+ {
+ namesleft #1 >
+ { ", " * t * }
+ {
+ numnames #2 >
+ { "," * }
+ 'skip$
+ if$
+ t "others" =
+ { " et~al." * }
+ { " and " * t * }
+ if$
+ }
+ if$
+ }
+ 't
+ if$
+ nameptr #1 + 'nameptr :=
+ namesleft #1 - 'namesleft :=
+ }
+ while$
+}
+
+FUNCTION {author.editor.full}
+{ author empty$
+ { editor empty$
+ { "" }
+ { editor format.full.names }
+ if$
+ }
+ { author format.full.names }
+ if$
+}
+
+FUNCTION {author.full}
+{ author empty$
+ { "" }
+ { author format.full.names }
+ if$
+}
+
+FUNCTION {editor.full}
+{ editor empty$
+ { "" }
+ { editor format.full.names }
+ if$
+}
+
+FUNCTION {make.full.names}
+{ type$ "book" =
+ type$ "inbook" =
+ or
+ 'author.editor.full
+ { type$ "collection" =
+ type$ "proceedings" =
+ or
+ 'editor.full
+ 'author.full
+ if$
+ }
+ if$
+}
+
+FUNCTION {output.bibitem}
+{ newline$
+ "\bibitem[{" write$
+ label write$
+ ")" make.full.names duplicate$ short.list =
+ { pop$ }
+ { * }
+ if$
+ "}]{" * write$
+ cite$ write$
+ "}" write$
+ newline$
+ ""
+ before.all 'output.state :=
+}
+
+FUNCTION {change.sentence.case}
+{ entry.lang lang.en =
+ { "t" change.case$ }
+ 'skip$
+ if$
+}
+
+FUNCTION {add.link}
+{ url empty$ not
+ { "\href{" url * "}{" * swap$ * "}" * }
+ { doi empty$ not
+ { "\href{http://dx.doi.org/" doi * "}{" * swap$ * "}" * }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.title}
+{ title empty$
+ { "" }
+ { title
+ sentence.case.title
+ 'change.sentence.case
+ 'skip$
+ if$
+ entry.numbered number empty$ not and
+ { bbl.colon * number * }
+ 'skip$
+ if$
+ link.title
+ 'add.link
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {tie.or.space.connect}
+{ duplicate$ text.length$ #3 <
+ { "~" }
+ { " " }
+ if$
+ swap$ * *
+}
+
+FUNCTION {either.or.check}
+{ empty$
+ 'pop$
+ { "can't use both " swap$ * " fields in " * cite$ * warning$ }
+ if$
+}
+
+FUNCTION {is.digit}
+{ duplicate$ empty$
+ { pop$ #0 }
+ { chr.to.int$
+ duplicate$ "0" chr.to.int$ <
+ { pop$ #0 }
+ { "9" chr.to.int$ >
+ { #0 }
+ { #1 }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {is.number}
+{ 's :=
+ s empty$
+ { #0 }
+ { s text.length$ 'charptr :=
+ { charptr #0 >
+ s charptr #1 substring$ is.digit
+ and
+ }
+ { charptr #1 - 'charptr := }
+ while$
+ charptr not
+ }
+ if$
+}
+
+FUNCTION {format.volume}
+{ volume empty$ not
+ { volume is.number
+ { entry.lang lang.zh =
+ { "第 " volume * " 卷" * }
+ { "volume" volume tie.or.space.connect }
+ if$
+ }
+ { volume }
+ if$
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {format.number}
+{ number empty$ not
+ { number is.number
+ { entry.lang lang.zh =
+ { "第 " number * " 册" * }
+ { "number" number tie.or.space.connect }
+ if$
+ }
+ { number }
+ if$
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {format.volume.number}
+{ volume empty$ not
+ { format.volume }
+ { format.number }
+ if$
+}
+
+FUNCTION {format.title.vol.num}
+{ title
+ sentence.case.title
+ 'change.sentence.case
+ 'skip$
+ if$
+ entry.numbered
+ { number empty$ not
+ { bbl.colon * number * }
+ 'skip$
+ if$
+ }
+ { format.volume.number 's :=
+ s empty$ not
+ { bbl.colon * s * }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.series.vol.num.title}
+{ format.volume.number 's :=
+ series empty$ not
+ { series
+ sentence.case.title
+ 'change.sentence.case
+ 'skip$
+ if$
+ entry.numbered
+ { bbl.wide.space * }
+ { bbl.colon *
+ s empty$ not
+ { s * bbl.wide.space * }
+ 'skip$
+ if$
+ }
+ if$
+ title *
+ sentence.case.title
+ 'change.sentence.case
+ 'skip$
+ if$
+ entry.numbered number empty$ not and
+ { bbl.colon * number * }
+ 'skip$
+ if$
+ }
+ { format.title.vol.num }
+ if$
+ link.title
+ 'add.link
+ 'skip$
+ if$
+}
+
+FUNCTION {format.booktitle.vol.num}
+{ booktitle
+ entry.numbered
+ 'skip$
+ { format.volume.number 's :=
+ s empty$ not
+ { bbl.colon * s * }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.series.vol.num.booktitle}
+{ format.volume.number 's :=
+ series empty$ not
+ { series bbl.colon *
+ entry.numbered not s empty$ not and
+ { s * bbl.wide.space * }
+ 'skip$
+ if$
+ booktitle *
+ }
+ { format.booktitle.vol.num }
+ if$
+ in.booktitle
+ { duplicate$ empty$ not entry.lang lang.en = and
+ { "In: " swap$ * }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {format.journal}
+{ journal
+ italic.jounal
+ 'italicize
+ 'skip$
+ if$
+}
+
+FUNCTION {set.entry.mark}
+{ entry.mark empty$ not
+ 'pop$
+ { mark empty$ not
+ { pop$ mark 'entry.mark := }
+ { 'entry.mark := }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.mark}
+{ show.mark
+ { medium empty$ not
+ { entry.mark "/" * medium * 'entry.mark := }
+ { entry.is.electronic
+ { entry.mark "/OL" * 'entry.mark := }
+ 'skip$
+ if$
+ }
+ if$
+ "\allowbreak[" entry.mark * "]" *
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {num.to.ordinal}
+{ duplicate$ text.length$ 'charptr :=
+ duplicate$ charptr #1 substring$ 's :=
+ s "1" =
+ { "st" * }
+ { s "2" =
+ { "nd" * }
+ { s "3" =
+ { "rd" * }
+ { "th" * }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.edition}
+{ edition empty$
+ { "" }
+ { edition is.number
+ { entry.lang lang.zh =
+ { edition " 版" * }
+ { edition num.to.ordinal " ed." * }
+ if$
+ }
+ { entry.lang lang.en =
+ { edition change.sentence.case 's :=
+ s "Revised" = s "Revised edition" = or
+ { "Rev. ed." }
+ { s " ed." *}
+ if$
+ }
+ { edition }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.publisher}
+{ publisher empty$ not
+ { publisher }
+ { school empty$ not
+ { school }
+ { organization empty$ not
+ { organization }
+ { institution empty$ not
+ { institution }
+ { "" }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.address.publisher}
+{ address empty$ not
+ { address
+ format.publisher empty$ not
+ { bbl.colon * format.publisher * }
+ { entry.is.electronic not show.missing.address.publisher and
+ { bbl.colon * bbl.sine.nomine * }
+ 'skip$
+ if$
+ }
+ if$
+ }
+ { entry.is.electronic not show.missing.address.publisher and
+ { format.publisher empty$ not
+ { bbl.sine.loco bbl.colon * format.publisher * }
+ { bbl.sine.loco.sine.nomine }
+ if$
+ }
+ { format.publisher empty$ not
+ { format.publisher }
+ { "" }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {extract.before.dash}
+{ duplicate$ empty$
+ { pop$ "" }
+ { 's :=
+ #1 'charptr :=
+ s text.length$ #1 + 'len :=
+ { charptr len <
+ s charptr #1 substring$ "-" = not
+ and
+ }
+ { charptr #1 + 'charptr := }
+ while$
+ s #1 charptr #1 - substring$
+ }
+ if$
+}
+
+FUNCTION {extract.after.dash}
+{ duplicate$ empty$
+ { pop$ "" }
+ { 's :=
+ #1 'charptr :=
+ s text.length$ #1 + 'len :=
+ { charptr len <
+ s charptr #1 substring$ "-" = not
+ and
+ }
+ { charptr #1 + 'charptr := }
+ while$
+ { charptr len <
+ s charptr #1 substring$ "-" =
+ and
+ }
+ { charptr #1 + 'charptr := }
+ while$
+ s charptr global.max$ substring$
+ }
+ if$
+}
+
+FUNCTION {contains.dash}
+{ duplicate$ empty$
+ { pop$ #0 }
+ { 's :=
+ { s empty$ not
+ s #1 #1 substring$ "-" = not
+ and
+ }
+ { s #2 global.max$ substring$ 's := }
+ while$
+ s empty$ not
+ }
+ if$
+}
+
+FUNCTION {format.year}
+{ year empty$ not
+ { year extract.before.dash }
+ { date empty$ not
+ { date extract.before.dash }
+ { "empty year in " cite$ * warning$
+ urldate empty$ not
+ { "[" urldate extract.before.dash * "]" * }
+ { "" }
+ if$
+ }
+ if$
+ }
+ if$
+ extra.label *
+}
+
+FUNCTION {format.date}
+{ type$ "patent" = type$ "newspaper" = or
+ date empty$ not and
+ { date }
+ { year }
+ if$
+}
+
+FUNCTION {format.editdate}
+{ date empty$ not
+ { "\allowbreak(" date * ")" * }
+ { "" }
+ if$
+}
+
+FUNCTION {format.urldate}
+{ urldate empty$ not entry.is.electronic and
+ { "\allowbreak[" urldate * "]" * }
+ { "" }
+ if$
+}
+
+FUNCTION {hyphenate}
+{ 't :=
+ ""
+ { t empty$ not }
+ { t #1 #1 substring$ "-" =
+ { "-" *
+ { t #1 #1 substring$ "-" = }
+ { t #2 global.max$ substring$ 't := }
+ while$
+ }
+ { t #1 #1 substring$ *
+ t #2 global.max$ substring$ 't :=
+ }
+ if$
+ }
+ while$
+}
+
+FUNCTION {format.pages}
+{ pages empty$
+ { "" }
+ { pages hyphenate }
+ if$
+}
+
+FUNCTION {format.journal.volume}
+{ volume empty$ not
+ { bold.journal.volume
+ { "\textbf{" volume * "}" * }
+ { volume }
+ if$
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {format.journal.number}
+{ number empty$ not
+ { "\penalty0 (" number * ")" * }
+ { "" }
+ if$
+}
+
+FUNCTION {format.journal.pages}
+{ pages empty$
+ { "" }
+ { ":\penalty0 " pages hyphenate * }
+ if$
+}
+
+FUNCTION {format.periodical.year.volume.number}
+{ year empty$ not
+ { year extract.before.dash }
+ { "empty year in periodical " cite$ * warning$ }
+ if$
+ volume empty$ not
+ { ", " * volume extract.before.dash * }
+ 'skip$
+ if$
+ number empty$ not
+ { "\penalty0 (" * number extract.before.dash * ")" * }
+ 'skip$
+ if$
+ year contains.dash
+ { "--" *
+ year extract.after.dash empty$
+ volume extract.after.dash empty$ and
+ number extract.after.dash empty$ and not
+ { year extract.after.dash empty$ not
+ { year extract.after.dash * }
+ { year extract.before.dash * }
+ if$
+ volume empty$ not
+ { ", " * volume extract.after.dash * }
+ 'skip$
+ if$
+ number empty$ not
+ { "\penalty0 (" * number extract.after.dash * ")" * }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {check.url}
+{ url empty$ not
+ { "\url{" url * "}" * 'entry.url :=
+ #1 'entry.is.electronic :=
+ }
+ { howpublished empty$ not
+ { howpublished #1 #5 substring$ "\url{" =
+ { howpublished 'entry.url :=
+ #1 'entry.is.electronic :=
+ }
+ 'skip$
+ if$
+ }
+ { note empty$ not
+ { note #1 #5 substring$ "\url{" =
+ { note 'entry.url :=
+ #1 'entry.is.electronic :=
+ }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.url}
+{ entry.url empty$ not
+ { new.block entry.url }
+ { "" }
+ if$
+}
+
+FUNCTION {check.doi}
+{ doi empty$ not
+ { #1 'entry.is.electronic := }
+ 'skip$
+ if$
+}
+
+FUNCTION {is.in.url}
+{ 's :=
+ s empty$
+ { #1 }
+ { entry.url empty$
+ { #0 }
+ { s text.length$ 'len :=
+ entry.url text.length$ 'charptr :=
+ { entry.url charptr len substring$ s = not
+ charptr #0 >
+ and
+ }
+ { charptr #1 - 'charptr := }
+ while$
+ charptr
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.doi}
+{ ""
+ doi empty$ not show.doi and
+ { "" 's :=
+ doi 't :=
+ #0 'numnames :=
+ { t empty$ not}
+ { t #1 #1 substring$ 'tmp.str :=
+ tmp.str "," = tmp.str " " = or t #2 #1 substring$ empty$ or
+ { t #2 #1 substring$ empty$
+ { s tmp.str * 's := }
+ 'skip$
+ if$
+ s empty$ s is.in.url or
+ 'skip$
+ { numnames #1 + 'numnames :=
+ numnames #1 >
+ { ", " * }
+ { "DOI: " * }
+ if$
+ "\doi{" s * "}" * *
+ }
+ if$
+ "" 's :=
+ }
+ { s tmp.str * 's := }
+ if$
+ t #2 global.max$ substring$ 't :=
+ }
+ while$
+ 's :=
+ s empty$ not
+ { new.block s }
+ { "" }
+ if$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {check.electronic}
+{ "" 'entry.url :=
+ #0 'entry.is.electronic :=
+ 'check.doi
+ 'skip$
+ if$
+ 'check.url
+ 'skip$
+ if$
+ medium empty$ not
+ { medium "MT" = medium "DK" = or medium "CD" = or medium "OL" = or
+ { #1 'entry.is.electronic := }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {format.note}
+{ note empty$ not show.note and
+ { note }
+ { "" }
+ if$
+}
+
+FUNCTION {empty.misc.check}
+{ author empty$ title empty$
+ year empty$
+ and and
+ key empty$ not and
+ { "all relevant fields are empty in " cite$ * warning$ }
+ 'skip$
+ if$
+}
+
+FUNCTION {monograph}
+{ output.bibitem
+ author empty$ not
+ { format.authors }
+ { editor empty$ not
+ { format.editors }
+ { "empty author and editor in " cite$ * warning$
+ bbl.anonymous
+ }
+ if$
+ }
+ if$
+ output
+ new.block
+ format.series.vol.num.title "title" output.check
+ "M" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.translators output
+ new.sentence
+ format.edition output
+ new.block
+ format.address.publisher output
+ format.year "year" output.check
+ format.pages bbl.colon output.after
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {incollection}
+{ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.title "title" output.check
+ "M" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.translators output
+ new.slash
+ format.editors output
+ new.block
+ format.series.vol.num.booktitle "booktitle" output.check
+ new.block
+ format.edition output
+ new.block
+ format.address.publisher output
+ format.year "year" output.check
+ format.pages bbl.colon output.after
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {periodical}
+{ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.title "title" output.check
+ "J" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.periodical.year.volume.number output
+ new.block
+ format.address.publisher output
+ format.date "year" output.check
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {article}
+{ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.title "title" output.check
+ "J" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.journal "journal" output.check
+ format.date "year" output.check
+ format.journal.volume output
+ format.journal.number "" output.after
+ format.journal.pages "" output.after
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {patent}
+{ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.title "title" output.check
+ "P" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.date "year" output.check
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {electronic}
+{ #1 #1 check.electronic
+ #1 'entry.is.electronic :=
+ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.series.vol.num.title "title" output.check
+ "EB" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.address.publisher output
+ date empty$
+ { format.date output }
+ 'skip$
+ if$
+ format.pages bbl.colon output.after
+ format.editdate "" output.after
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {misc}
+{ journal empty$ not
+ 'article
+ { booktitle empty$ not
+ 'incollection
+ { publisher empty$ not
+ 'monograph
+ { entry.is.electronic
+ 'electronic
+ { "Z" set.entry.mark
+ monograph
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ empty.misc.check
+}
+
+FUNCTION {archive}
+{ "A" set.entry.mark
+ misc
+}
+
+FUNCTION {book} { monograph }
+
+FUNCTION {booklet} { book }
+
+FUNCTION {collection}
+{ "G" set.entry.mark
+ monograph
+}
+
+FUNCTION {database}
+{ "DB" set.entry.mark
+ electronic
+}
+
+FUNCTION {dataset}
+{ "DS" set.entry.mark
+ electronic
+}
+
+FUNCTION {inbook} { book }
+
+FUNCTION {inproceedings}
+{ "C" set.entry.mark
+ incollection
+}
+
+FUNCTION {conference} { inproceedings }
+
+FUNCTION {map}
+{ "CM" set.entry.mark
+ misc
+}
+
+FUNCTION {manual} { monograph }
+
+FUNCTION {mastersthesis}
+{ "D" set.entry.mark
+ monograph
+}
+
+FUNCTION {newspaper}
+{ "N" set.entry.mark
+ article
+}
+
+FUNCTION {online}
+{ "EB" set.entry.mark
+ electronic
+}
+
+FUNCTION {phdthesis} { mastersthesis }
+
+FUNCTION {proceedings}
+{ "C" set.entry.mark
+ monograph
+}
+
+FUNCTION {software}
+{ "CP" set.entry.mark
+ electronic
+}
+
+FUNCTION {standard}
+{ "S" set.entry.mark
+ misc
+}
+
+FUNCTION {techreport}
+{ "R" set.entry.mark
+ misc
+}
+
+FUNCTION {unpublished}
+{ "Z" set.entry.mark
+ misc
+}
+
+FUNCTION {default.type} { misc }
+
+MACRO {jan} {"January"}
+
+MACRO {feb} {"February"}
+
+MACRO {mar} {"March"}
+
+MACRO {apr} {"April"}
+
+MACRO {may} {"May"}
+
+MACRO {jun} {"June"}
+
+MACRO {jul} {"July"}
+
+MACRO {aug} {"August"}
+
+MACRO {sep} {"September"}
+
+MACRO {oct} {"October"}
+
+MACRO {nov} {"November"}
+
+MACRO {dec} {"December"}
+
+MACRO {acmcs} {"ACM Computing Surveys"}
+
+MACRO {acta} {"Acta Informatica"}
+
+MACRO {cacm} {"Communications of the ACM"}
+
+MACRO {ibmjrd} {"IBM Journal of Research and Development"}
+
+MACRO {ibmsj} {"IBM Systems Journal"}
+
+MACRO {ieeese} {"IEEE Transactions on Software Engineering"}
+
+MACRO {ieeetc} {"IEEE Transactions on Computers"}
+
+MACRO {ieeetcad}
+ {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"}
+
+MACRO {ipl} {"Information Processing Letters"}
+
+MACRO {jacm} {"Journal of the ACM"}
+
+MACRO {jcss} {"Journal of Computer and System Sciences"}
+
+MACRO {scp} {"Science of Computer Programming"}
+
+MACRO {sicomp} {"SIAM Journal on Computing"}
+
+MACRO {tocs} {"ACM Transactions on Computer Systems"}
+
+MACRO {tods} {"ACM Transactions on Database Systems"}
+
+MACRO {tog} {"ACM Transactions on Graphics"}
+
+MACRO {toms} {"ACM Transactions on Mathematical Software"}
+
+MACRO {toois} {"ACM Transactions on Office Information Systems"}
+
+MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"}
+
+MACRO {tcs} {"Theoretical Computer Science"}
+
+FUNCTION {sortify}
+{ purify$
+ "l" change.case$
+}
+
+FUNCTION {chop.word}
+{ 's :=
+ 'len :=
+ s #1 len substring$ =
+ { s len #1 + global.max$ substring$ }
+ 's
+ if$
+}
+
+FUNCTION {format.lab.name}
+{ "{vv~}{ll}{, jj}{, ff}" format.name$
+ duplicate$ get.str.lang 'name.lang :=
+ name.lang lang.zh = name.lang lang.ja = or
+ { #1 "{ll}{ff}" format.name$ }
+ { #1 "{vv~}{ll}" format.name$ }
+ if$
+}
+
+FUNCTION {format.lab.names}
+{ 's :=
+ s #1 format.lab.name
+ s num.names$ #2 = name.lang lang.en = name.lang lang.ru = or and
+ { s #2 format.lab.name 't :=
+ t "others" =
+ { bbl.space * citation.et.al * }
+ { " " * citation.and * " " * t * }
+ if$
+ }
+ { s num.names$ #1 >
+ { bbl.space * citation.et.al * }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {author.key.label}
+{ author empty$
+ { key empty$
+ { cite$ #1 #3 substring$ }
+ 'key
+ if$
+ }
+ { author format.lab.names }
+ if$
+}
+
+FUNCTION {author.editor.key.label}
+{ author empty$
+ { editor empty$
+ { key empty$
+ { cite$ #1 #3 substring$ }
+ 'key
+ if$
+ }
+ { editor format.lab.names }
+ if$
+ }
+ { author format.lab.names }
+ if$
+}
+
+FUNCTION {author.key.organization.label}
+{ author empty$
+ { key empty$
+ { organization empty$
+ { cite$ #1 #3 substring$ }
+ { "The " #4 organization chop.word #3 text.prefix$ }
+ if$
+ }
+ 'key
+ if$
+ }
+ { author format.lab.names }
+ if$
+}
+
+FUNCTION {editor.key.organization.label}
+{ editor empty$
+ { key empty$
+ { organization empty$
+ { cite$ #1 #3 substring$ }
+ { "The " #4 organization chop.word #3 text.prefix$ }
+ if$
+ }
+ 'key
+ if$
+ }
+ { editor format.lab.names }
+ if$
+}
+
+FUNCTION {calc.short.authors}
+{ type$ "book" =
+ type$ "inbook" =
+ or
+ 'author.editor.key.label
+ { type$ "collection" =
+ type$ "proceedings" =
+ or
+ { editor empty$ not
+ 'editor.key.organization.label
+ 'author.key.organization.label
+ if$
+ }
+ 'author.key.label
+ if$
+ }
+ if$
+ 'short.list :=
+}
+
+FUNCTION {calc.label}
+{ calc.short.authors
+ short.list
+ "("
+ *
+ format.year duplicate$ empty$
+ short.list key field.or.null = or
+ { pop$ "" }
+ 'skip$
+ if$
+ *
+ 'label :=
+}
+
+FUNCTION {sort.language.label}
+{ entry.lang lang.zh =
+ { "a zh " }
+ { entry.lang lang.ja =
+ { "b ja " }
+ { entry.lang lang.en =
+ { "c en " }
+ { entry.lang lang.ru =
+ { "d ru " }
+ { "e other " }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {sort.format.names}
+{ 's :=
+ #1 'nameptr :=
+ ""
+ s num.names$ 'numnames :=
+ numnames 'namesleft :=
+ { namesleft #0 > }
+ {
+ s nameptr "{vv{ } }{ll{ }}{ ff{ }}{ jj{ }}" format.name$ 't :=
+ nameptr #1 >
+ {
+ " " *
+ namesleft #1 = t "others" = and
+ { "zzzzz" * }
+ { numnames #2 > nameptr #2 = and
+ { "zz" * year field.or.null * " " * }
+ 'skip$
+ if$
+ t sortify *
+ }
+ if$
+ }
+ { t sortify * }
+ if$
+ nameptr #1 + 'nameptr :=
+ namesleft #1 - 'namesleft :=
+ }
+ while$
+}
+
+FUNCTION {sort.format.title}
+{ 't :=
+ "A " #2
+ "An " #3
+ "The " #4 t chop.word
+ chop.word
+ chop.word
+ sortify
+ #1 global.max$ substring$
+}
+
+FUNCTION {anonymous.sort}
+{ entry.lang lang.zh =
+ { "yi4 ming2" }
+ { "anon" }
+ if$
+}
+
+FUNCTION {warn.empty.key}
+{ entry.lang lang.zh =
+ { "empty key in " cite$ * warning$ }
+ 'skip$
+ if$
+}
+
+FUNCTION {author.sort}
+{ key empty$
+ { warn.empty.key
+ author empty$
+ { anonymous.sort }
+ { author sort.format.names }
+ if$
+ }
+ { key sortify }
+ if$
+}
+
+FUNCTION {author.editor.sort}
+{ key empty$
+ { warn.empty.key
+ author empty$
+ { editor empty$
+ { anonymous.sort }
+ { editor sort.format.names }
+ if$
+ }
+ { author sort.format.names }
+ if$
+ }
+ { key sortify }
+ if$
+}
+
+FUNCTION {author.organization.sort}
+{ key empty$
+ { warn.empty.key
+ author empty$
+ { organization empty$
+ { anonymous.sort }
+ { "The " #4 organization chop.word sortify }
+ if$
+ }
+ { author sort.format.names }
+ if$
+ }
+ { key sortify }
+ if$
+}
+
+FUNCTION {editor.organization.sort}
+{ key empty$
+ { warn.empty.key
+ editor empty$
+ { organization empty$
+ { anonymous.sort }
+ { "The " #4 organization chop.word sortify }
+ if$
+ }
+ { editor sort.format.names }
+ if$
+ }
+ { key sortify }
+ if$
+}
+
+FUNCTION {presort}
+{ set.entry.lang
+ set.entry.numbered
+ show.url show.doi check.electronic
+ calc.label
+ label sortify
+ " "
+ *
+ sort.language.label
+ type$ "book" =
+ type$ "inbook" =
+ or
+ 'author.editor.sort
+ { type$ "collection" =
+ type$ "proceedings" =
+ or
+ 'editor.organization.sort
+ 'author.sort
+ if$
+ }
+ if$
+ *
+ " "
+ *
+ year field.or.null sortify
+ *
+ " "
+ *
+ cite$
+ *
+ #1 entry.max$ substring$
+ 'sort.label :=
+ sort.label *
+ #1 entry.max$ substring$
+ 'sort.key$ :=
+}
+
+STRINGS { longest.label last.label next.extra }
+
+INTEGERS { longest.label.width last.extra.num number.label }
+
+FUNCTION {initialize.longest.label}
+{ "" 'longest.label :=
+ #0 int.to.chr$ 'last.label :=
+ "" 'next.extra :=
+ #0 'longest.label.width :=
+ #0 'last.extra.num :=
+ #0 'number.label :=
+}
+
+FUNCTION {forward.pass}
+{ last.label label =
+ { last.extra.num #1 + 'last.extra.num :=
+ last.extra.num int.to.chr$ 'extra.label :=
+ }
+ { "a" chr.to.int$ 'last.extra.num :=
+ "" 'extra.label :=
+ label 'last.label :=
+ }
+ if$
+ number.label #1 + 'number.label :=
+}
+
+FUNCTION {reverse.pass}
+{ next.extra "b" =
+ { "a" 'extra.label := }
+ 'skip$
+ if$
+ extra.label 'next.extra :=
+ extra.label
+ duplicate$ empty$
+ 'skip$
+ { "{\natexlab{" swap$ * "}}" * }
+ if$
+ 'extra.label :=
+ label extra.label * 'label :=
+}
+
+FUNCTION {bib.sort.order}
+{ sort.label 'sort.key$ :=
+}
+
+FUNCTION {begin.bib}
+{ preamble$ empty$
+ 'skip$
+ { preamble$ write$ newline$ }
+ if$
+ "\begin{thebibliography}{" number.label int.to.str$ * "}" *
+ write$ newline$
+ "\providecommand{\natexlab}[1]{#1}"
+ write$ newline$
+ "\providecommand{\url}[1]{#1}"
+ write$ newline$
+ "\expandafter\ifx\csname urlstyle\endcsname\relax\relax\else"
+ write$ newline$
+ " \urlstyle{same}\fi"
+ write$ newline$
+ show.doi
+ { "\providecommand{\href}[2]{\url{#2}}"
+ write$ newline$
+ "\providecommand{\doi}[1]{\href{https://doi.org/#1}{#1}}"
+ write$ newline$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {end.bib}
+{ newline$
+ "\end{thebibliography}" write$ newline$
+}
+
+READ
+
+EXECUTE {init.state.consts}
+
+EXECUTE {load.config}
+
+ITERATE {presort}
+
+SORT
+
+EXECUTE {initialize.longest.label}
+
+ITERATE {forward.pass}
+
+REVERSE {reverse.pass}
+
+ITERATE {bib.sort.order}
+
+SORT
+
+EXECUTE {begin.bib}
+
+ITERATE {call.type$}
+
+EXECUTE {end.bib}
diff --git a/Biblio/gbt7714-unsrt.bst b/Biblio/gbt7714-unsrt.bst
new file mode 100644
index 0000000..bafa328
--- /dev/null
+++ b/Biblio/gbt7714-unsrt.bst
@@ -0,0 +1,1874 @@
+%%
+%% This is file `gbt7714-unsrt.bst',
+%% generated with the docstrip utility.
+%%
+%% The original source files were:
+%%
+%% gbt7714.dtx (with options: `2015,numerical')
+%% -------------------------------------------------------------------
+%% GB/T 7714-2015 BibTeX Style
+%% https://github.com/CTeX-org/gbt7714-bibtex-style
+%% Version: 2019/01/02 v1.1
+%% -------------------------------------------------------------------
+%% Copyright (C) 2016-2019 by Zeping Lee <zepinglee AT gmail.com>
+%% -------------------------------------------------------------------
+%% This file may be distributed and/or modified under the
+%% conditions of the LaTeX Project Public License, either version 1.3c
+%% of this license or (at your option) any later version.
+%% The latest version of this license is in
+%% https://www.latex-project.org/lppl.txt
+%% and version 1.3c or later is part of all distributions of LaTeX
+%% version 2005/12/01 or later.
+%% -------------------------------------------------------------------
+
+INTEGERS {
+ uppercase.name
+ max.num.authors
+ period.between.author.year
+ sentence.case.title
+ link.title
+ show.mark
+ slash.for.extraction
+ in.booktitle
+ italic.jounal
+ bold.journal.volume
+ show.missing.address.publisher
+ show.url
+ show.doi
+ show.note
+}
+
+FUNCTION {load.config}
+{
+ #1 'uppercase.name :=
+ #3 'max.num.authors :=
+ #0 'period.between.author.year :=
+ #1 'sentence.case.title :=
+ #0 'link.title :=
+ #1 'show.mark :=
+ #1 'slash.for.extraction :=
+ #0 'in.booktitle :=
+ #0 'italic.jounal :=
+ #0 'bold.journal.volume :=
+ #0 'show.missing.address.publisher :=
+ #1 'show.url :=
+ #1 'show.doi :=
+ #0 'show.note :=
+}
+
+ENTRY
+ { address
+ author
+ booktitle
+ date
+ doi
+ edition
+ editor
+ howpublished
+ institution
+ journal
+ key
+ language
+ mark
+ medium
+ note
+ number
+ organization
+ pages
+ publisher
+ school
+ series
+ title
+ translator
+ url
+ urldate
+ volume
+ year
+ }
+ { entry.lang entry.is.electronic entry.numbered }
+ { label extra.label sort.label short.list entry.mark entry.url }
+
+INTEGERS { output.state before.all mid.sentence after.sentence after.block after.slash }
+
+INTEGERS { lang.zh lang.ja lang.en lang.ru lang.other }
+
+INTEGERS { charptr len }
+
+FUNCTION {init.state.consts}
+{ #0 'before.all :=
+ #1 'mid.sentence :=
+ #2 'after.sentence :=
+ #3 'after.block :=
+ #4 'after.slash :=
+ #3 'lang.zh :=
+ #4 'lang.ja :=
+ #1 'lang.en :=
+ #2 'lang.ru :=
+ #0 'lang.other :=
+}
+
+FUNCTION {bbl.anonymous}
+{ entry.lang lang.zh =
+ { "佚名" }
+ { "Anon" }
+ if$
+}
+
+FUNCTION {bbl.space}
+{ entry.lang lang.zh =
+ { "\ " }
+ { " " }
+ if$
+}
+
+FUNCTION {bbl.et.al}
+{ entry.lang lang.zh =
+ { "等" }
+ { entry.lang lang.ja =
+ { "他" }
+ { entry.lang lang.ru =
+ { "идр" }
+ { "et~al." }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {citation.et.al}
+{ entry.lang lang.zh =
+ { "等" }
+ { "{\bibetal}" }
+ if$
+}
+
+FUNCTION {citation.and} { "{\biband}" }
+
+FUNCTION {bbl.colon} { ": " }
+
+FUNCTION {bbl.wide.space} { "\quad " }
+
+FUNCTION {bbl.slash} { "//\allowbreak " }
+
+FUNCTION {bbl.sine.loco}
+{ entry.lang lang.zh =
+ { "[出版地不详]" }
+ { "[S.l.]" }
+ if$
+}
+
+FUNCTION {bbl.sine.nomine}
+{ entry.lang lang.zh =
+ { "[出版者不详]" }
+ { "[s.n.]" }
+ if$
+}
+
+FUNCTION {bbl.sine.loco.sine.nomine}
+{ entry.lang lang.zh =
+ { "[出版地不详: 出版者不详]" }
+ { "[S.l.: s.n.]" }
+ if$
+}
+
+FUNCTION {not}
+{ { #0 }
+ { #1 }
+ if$
+}
+
+FUNCTION {and}
+{ 'skip$
+ { pop$ #0 }
+ if$
+}
+
+FUNCTION {or}
+{ { pop$ #1 }
+ 'skip$
+ if$
+}
+
+STRINGS { s t }
+
+FUNCTION {output.nonnull}
+{ 's :=
+ output.state mid.sentence =
+ { ", " * write$ }
+ { output.state after.block =
+ { add.period$ write$
+ newline$
+ "\newblock " write$
+ }
+ { output.state before.all =
+ 'write$
+ { output.state after.slash =
+ { bbl.slash * write$
+ newline$
+ }
+ { add.period$ " " * write$ }
+ if$
+ }
+ if$
+ }
+ if$
+ mid.sentence 'output.state :=
+ }
+ if$
+ s
+}
+
+FUNCTION {output}
+{ duplicate$ empty$
+ 'pop$
+ 'output.nonnull
+ if$
+}
+
+FUNCTION {output.after}
+{ 't :=
+ duplicate$ empty$
+ 'pop$
+ { 's :=
+ output.state mid.sentence =
+ { t * write$ }
+ { output.state after.block =
+ { add.period$ write$
+ newline$
+ "\newblock " write$
+ }
+ { output.state before.all =
+ 'write$
+ { output.state after.slash =
+ { bbl.slash * write$ }
+ { add.period$ " " * write$ }
+ if$
+ }
+ if$
+ }
+ if$
+ mid.sentence 'output.state :=
+ }
+ if$
+ s
+ }
+ if$
+}
+
+FUNCTION {output.check}
+{ 't :=
+ duplicate$ empty$
+ { pop$ "empty " t * " in " * cite$ * warning$ }
+ 'output.nonnull
+ if$
+}
+
+FUNCTION {fin.entry}
+{ add.period$
+ write$
+ newline$
+}
+
+FUNCTION {new.block}
+{ output.state before.all =
+ 'skip$
+ { output.state after.slash =
+ 'skip$
+ { after.block 'output.state := }
+ if$
+ }
+ if$
+}
+
+FUNCTION {new.sentence}
+{ output.state after.block =
+ 'skip$
+ { output.state before.all =
+ 'skip$
+ { output.state after.slash =
+ 'skip$
+ { after.sentence 'output.state := }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {new.slash}
+{ output.state before.all =
+ 'skip$
+ { slash.for.extraction
+ { after.slash 'output.state := }
+ { after.block 'output.state := }
+ if$
+ }
+ if$
+}
+
+FUNCTION {new.block.checka}
+{ empty$
+ 'skip$
+ 'new.block
+ if$
+}
+
+FUNCTION {new.block.checkb}
+{ empty$
+ swap$ empty$
+ and
+ 'skip$
+ 'new.block
+ if$
+}
+
+FUNCTION {new.sentence.checka}
+{ empty$
+ 'skip$
+ 'new.sentence
+ if$
+}
+
+FUNCTION {new.sentence.checkb}
+{ empty$
+ swap$ empty$
+ and
+ 'skip$
+ 'new.sentence
+ if$
+}
+
+FUNCTION {field.or.null}
+{ duplicate$ empty$
+ { pop$ "" }
+ 'skip$
+ if$
+}
+
+FUNCTION {italicize}
+{ duplicate$ empty$
+ { pop$ "" }
+ { "\textit{" swap$ * "}" * }
+ if$
+}
+
+INTEGERS { byte second.byte }
+
+INTEGERS { char.lang tmp.lang }
+
+STRINGS { tmp.str }
+
+FUNCTION {get.str.lang}
+{ 'tmp.str :=
+ lang.other 'tmp.lang :=
+ #1 'charptr :=
+ tmp.str text.length$ #1 + 'len :=
+ { charptr len < }
+ { tmp.str charptr #1 substring$ chr.to.int$ 'byte :=
+ byte #128 <
+ { charptr #1 + 'charptr :=
+ byte #64 > byte #91 < and byte #96 > byte #123 < and or
+ { lang.en 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ { tmp.str charptr #1 + #1 substring$ chr.to.int$ 'second.byte :=
+ byte #224 <
+ { charptr #2 + 'charptr :=
+ byte #207 > byte #212 < and
+ byte #212 = second.byte #176 < and or
+ { lang.ru 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ { byte #240 <
+ { charptr #3 + 'charptr :=
+ byte #227 > byte #234 < and
+ { lang.zh 'char.lang := }
+ { byte #227 =
+ { second.byte #143 >
+ { lang.zh 'char.lang := }
+ { second.byte #128 > second.byte #132 < and
+ { lang.ja 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ if$
+ }
+ { byte #239 =
+ second.byte #163 > second.byte #172 < and and
+ { lang.zh 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ { charptr #4 + 'charptr :=
+ byte #240 = second.byte #159 > and
+ { lang.zh 'char.lang := }
+ { lang.other 'char.lang := }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ char.lang tmp.lang >
+ { char.lang 'tmp.lang := }
+ 'skip$
+ if$
+ }
+ while$
+ tmp.lang
+}
+
+FUNCTION {check.entry.lang}
+{ author field.or.null
+ title field.or.null *
+ get.str.lang
+}
+
+FUNCTION {set.entry.lang}
+{ language empty$
+ { check.entry.lang }
+ { language "english" = language "american" = or language "british" = or
+ { lang.en }
+ { language "chinese" =
+ { lang.zh }
+ { language "japanese" =
+ { lang.ja }
+ { language "russian" =
+ { lang.ru }
+ { check.entry.lang }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ 'entry.lang :=
+}
+
+FUNCTION {set.entry.numbered}
+{ type$ "patent" =
+ type$ "standard" = or
+ type$ "techreport" = or
+ { #1 'entry.numbered := }
+ { #0 'entry.numbered := }
+ if$
+}
+
+INTEGERS { nameptr namesleft numnames name.lang }
+
+FUNCTION {format.names}
+{ 's :=
+ #1 'nameptr :=
+ s num.names$ 'numnames :=
+ numnames 'namesleft :=
+ { namesleft #0 > }
+ { s nameptr "{vv~}{ll}{, jj}{, ff}" format.name$ 't :=
+ nameptr max.num.authors >
+ { bbl.et.al
+ #1 'namesleft :=
+ }
+ { t "others" =
+ { bbl.et.al }
+ { t get.str.lang 'name.lang :=
+ name.lang lang.en =
+ { t #1 "{vv~}{ll}{~f{~}}" format.name$
+ uppercase.name
+ { "u" change.case$ }
+ 'skip$
+ if$
+ t #1 "{, jj}" format.name$ *
+ }
+ { t #1 "{ll}{ff}" format.name$ }
+ if$
+ }
+ if$
+ }
+ if$
+ nameptr #1 >
+ { ", " swap$ * * }
+ 'skip$
+ if$
+ nameptr #1 + 'nameptr :=
+ namesleft #1 - 'namesleft :=
+ }
+ while$
+}
+
+FUNCTION {format.key}
+{ empty$
+ { key field.or.null }
+ { "" }
+ if$
+}
+
+FUNCTION {format.authors}
+{ author empty$ not
+ { author format.names }
+ { "empty author in " cite$ * warning$
+ ""
+ }
+ if$
+}
+
+FUNCTION {format.editors}
+{ editor empty$
+ { "" }
+ { editor format.names }
+ if$
+}
+
+FUNCTION {format.translators}
+{ translator empty$
+ { "" }
+ { translator format.names
+ entry.lang lang.zh =
+ { translator num.names$ #3 >
+ { "译" * }
+ { ", 译" * }
+ if$
+ }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.full.names}
+{'s :=
+ #1 'nameptr :=
+ s num.names$ 'numnames :=
+ numnames 'namesleft :=
+ { namesleft #0 > }
+ { s nameptr "{vv~}{ll}{, jj}{, ff}" format.name$ 't :=
+ t get.str.lang 'name.lang :=
+ name.lang lang.en =
+ { t #1 "{vv~}{ll}" format.name$ 't := }
+ { t #1 "{ll}{ff}" format.name$ 't := }
+ if$
+ nameptr #1 >
+ {
+ namesleft #1 >
+ { ", " * t * }
+ {
+ numnames #2 >
+ { "," * }
+ 'skip$
+ if$
+ t "others" =
+ { " et~al." * }
+ { " and " * t * }
+ if$
+ }
+ if$
+ }
+ 't
+ if$
+ nameptr #1 + 'nameptr :=
+ namesleft #1 - 'namesleft :=
+ }
+ while$
+}
+
+FUNCTION {author.editor.full}
+{ author empty$
+ { editor empty$
+ { "" }
+ { editor format.full.names }
+ if$
+ }
+ { author format.full.names }
+ if$
+}
+
+FUNCTION {author.full}
+{ author empty$
+ { "" }
+ { author format.full.names }
+ if$
+}
+
+FUNCTION {editor.full}
+{ editor empty$
+ { "" }
+ { editor format.full.names }
+ if$
+}
+
+FUNCTION {make.full.names}
+{ type$ "book" =
+ type$ "inbook" =
+ or
+ 'author.editor.full
+ { type$ "collection" =
+ type$ "proceedings" =
+ or
+ 'editor.full
+ 'author.full
+ if$
+ }
+ if$
+}
+
+FUNCTION {output.bibitem}
+{ newline$
+ "\bibitem[{" write$
+ label write$
+ ")" make.full.names duplicate$ short.list =
+ { pop$ }
+ { * }
+ if$
+ "}]{" * write$
+ cite$ write$
+ "}" write$
+ newline$
+ ""
+ before.all 'output.state :=
+}
+
+FUNCTION {change.sentence.case}
+{ entry.lang lang.en =
+ { "t" change.case$ }
+ 'skip$
+ if$
+}
+
+FUNCTION {add.link}
+{ url empty$ not
+ { "\href{" url * "}{" * swap$ * "}" * }
+ { doi empty$ not
+ { "\href{http://dx.doi.org/" doi * "}{" * swap$ * "}" * }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.title}
+{ title empty$
+ { "" }
+ { title
+ sentence.case.title
+ 'change.sentence.case
+ 'skip$
+ if$
+ entry.numbered number empty$ not and
+ { bbl.colon * number * }
+ 'skip$
+ if$
+ link.title
+ 'add.link
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {tie.or.space.connect}
+{ duplicate$ text.length$ #3 <
+ { "~" }
+ { " " }
+ if$
+ swap$ * *
+}
+
+FUNCTION {either.or.check}
+{ empty$
+ 'pop$
+ { "can't use both " swap$ * " fields in " * cite$ * warning$ }
+ if$
+}
+
+FUNCTION {is.digit}
+{ duplicate$ empty$
+ { pop$ #0 }
+ { chr.to.int$
+ duplicate$ "0" chr.to.int$ <
+ { pop$ #0 }
+ { "9" chr.to.int$ >
+ { #0 }
+ { #1 }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {is.number}
+{ 's :=
+ s empty$
+ { #0 }
+ { s text.length$ 'charptr :=
+ { charptr #0 >
+ s charptr #1 substring$ is.digit
+ and
+ }
+ { charptr #1 - 'charptr := }
+ while$
+ charptr not
+ }
+ if$
+}
+
+FUNCTION {format.volume}
+{ volume empty$ not
+ { volume is.number
+ { entry.lang lang.zh =
+ { "第 " volume * " 卷" * }
+ { "volume" volume tie.or.space.connect }
+ if$
+ }
+ { volume }
+ if$
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {format.number}
+{ number empty$ not
+ { number is.number
+ { entry.lang lang.zh =
+ { "第 " number * " 册" * }
+ { "number" number tie.or.space.connect }
+ if$
+ }
+ { number }
+ if$
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {format.volume.number}
+{ volume empty$ not
+ { format.volume }
+ { format.number }
+ if$
+}
+
+FUNCTION {format.title.vol.num}
+{ title
+ sentence.case.title
+ 'change.sentence.case
+ 'skip$
+ if$
+ entry.numbered
+ { number empty$ not
+ { bbl.colon * number * }
+ 'skip$
+ if$
+ }
+ { format.volume.number 's :=
+ s empty$ not
+ { bbl.colon * s * }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.series.vol.num.title}
+{ format.volume.number 's :=
+ series empty$ not
+ { series
+ sentence.case.title
+ 'change.sentence.case
+ 'skip$
+ if$
+ entry.numbered
+ { bbl.wide.space * }
+ { bbl.colon *
+ s empty$ not
+ { s * bbl.wide.space * }
+ 'skip$
+ if$
+ }
+ if$
+ title *
+ sentence.case.title
+ 'change.sentence.case
+ 'skip$
+ if$
+ entry.numbered number empty$ not and
+ { bbl.colon * number * }
+ 'skip$
+ if$
+ }
+ { format.title.vol.num }
+ if$
+ link.title
+ 'add.link
+ 'skip$
+ if$
+}
+
+FUNCTION {format.booktitle.vol.num}
+{ booktitle
+ entry.numbered
+ 'skip$
+ { format.volume.number 's :=
+ s empty$ not
+ { bbl.colon * s * }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.series.vol.num.booktitle}
+{ format.volume.number 's :=
+ series empty$ not
+ { series bbl.colon *
+ entry.numbered not s empty$ not and
+ { s * bbl.wide.space * }
+ 'skip$
+ if$
+ booktitle *
+ }
+ { format.booktitle.vol.num }
+ if$
+ in.booktitle
+ { duplicate$ empty$ not entry.lang lang.en = and
+ { "In: " swap$ * }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {format.journal}
+{ journal
+ italic.jounal
+ 'italicize
+ 'skip$
+ if$
+}
+
+FUNCTION {set.entry.mark}
+{ entry.mark empty$ not
+ 'pop$
+ { mark empty$ not
+ { pop$ mark 'entry.mark := }
+ { 'entry.mark := }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.mark}
+{ show.mark
+ { medium empty$ not
+ { entry.mark "/" * medium * 'entry.mark := }
+ { entry.is.electronic
+ { entry.mark "/OL" * 'entry.mark := }
+ 'skip$
+ if$
+ }
+ if$
+ "\allowbreak[" entry.mark * "]" *
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {num.to.ordinal}
+{ duplicate$ text.length$ 'charptr :=
+ duplicate$ charptr #1 substring$ 's :=
+ s "1" =
+ { "st" * }
+ { s "2" =
+ { "nd" * }
+ { s "3" =
+ { "rd" * }
+ { "th" * }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.edition}
+{ edition empty$
+ { "" }
+ { edition is.number
+ { entry.lang lang.zh =
+ { edition " 版" * }
+ { edition num.to.ordinal " ed." * }
+ if$
+ }
+ { entry.lang lang.en =
+ { edition change.sentence.case 's :=
+ s "Revised" = s "Revised edition" = or
+ { "Rev. ed." }
+ { s " ed." *}
+ if$
+ }
+ { edition }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.publisher}
+{ publisher empty$ not
+ { publisher }
+ { school empty$ not
+ { school }
+ { organization empty$ not
+ { organization }
+ { institution empty$ not
+ { institution }
+ { "" }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.address.publisher}
+{ address empty$ not
+ { address
+ format.publisher empty$ not
+ { bbl.colon * format.publisher * }
+ { entry.is.electronic not show.missing.address.publisher and
+ { bbl.colon * bbl.sine.nomine * }
+ 'skip$
+ if$
+ }
+ if$
+ }
+ { entry.is.electronic not show.missing.address.publisher and
+ { format.publisher empty$ not
+ { bbl.sine.loco bbl.colon * format.publisher * }
+ { bbl.sine.loco.sine.nomine }
+ if$
+ }
+ { format.publisher empty$ not
+ { format.publisher }
+ { "" }
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {extract.before.dash}
+{ duplicate$ empty$
+ { pop$ "" }
+ { 's :=
+ #1 'charptr :=
+ s text.length$ #1 + 'len :=
+ { charptr len <
+ s charptr #1 substring$ "-" = not
+ and
+ }
+ { charptr #1 + 'charptr := }
+ while$
+ s #1 charptr #1 - substring$
+ }
+ if$
+}
+
+FUNCTION {extract.after.dash}
+{ duplicate$ empty$
+ { pop$ "" }
+ { 's :=
+ #1 'charptr :=
+ s text.length$ #1 + 'len :=
+ { charptr len <
+ s charptr #1 substring$ "-" = not
+ and
+ }
+ { charptr #1 + 'charptr := }
+ while$
+ { charptr len <
+ s charptr #1 substring$ "-" =
+ and
+ }
+ { charptr #1 + 'charptr := }
+ while$
+ s charptr global.max$ substring$
+ }
+ if$
+}
+
+FUNCTION {contains.dash}
+{ duplicate$ empty$
+ { pop$ #0 }
+ { 's :=
+ { s empty$ not
+ s #1 #1 substring$ "-" = not
+ and
+ }
+ { s #2 global.max$ substring$ 's := }
+ while$
+ s empty$ not
+ }
+ if$
+}
+
+FUNCTION {format.year}
+{ year empty$ not
+ { year extract.before.dash }
+ { date empty$ not
+ { date extract.before.dash }
+ { "empty year in " cite$ * warning$
+ urldate empty$ not
+ { "[" urldate extract.before.dash * "]" * }
+ { "" }
+ if$
+ }
+ if$
+ }
+ if$
+ extra.label *
+}
+
+FUNCTION {format.date}
+{ type$ "patent" = type$ "newspaper" = or
+ date empty$ not and
+ { date }
+ { year }
+ if$
+}
+
+FUNCTION {format.editdate}
+{ date empty$ not
+ { "\allowbreak(" date * ")" * }
+ { "" }
+ if$
+}
+
+FUNCTION {format.urldate}
+{ urldate empty$ not entry.is.electronic and
+ { "\allowbreak[" urldate * "]" * }
+ { "" }
+ if$
+}
+
+FUNCTION {hyphenate}
+{ 't :=
+ ""
+ { t empty$ not }
+ { t #1 #1 substring$ "-" =
+ { "-" *
+ { t #1 #1 substring$ "-" = }
+ { t #2 global.max$ substring$ 't := }
+ while$
+ }
+ { t #1 #1 substring$ *
+ t #2 global.max$ substring$ 't :=
+ }
+ if$
+ }
+ while$
+}
+
+FUNCTION {format.pages}
+{ pages empty$
+ { "" }
+ { pages hyphenate }
+ if$
+}
+
+FUNCTION {format.journal.volume}
+{ volume empty$ not
+ { bold.journal.volume
+ { "\textbf{" volume * "}" * }
+ { volume }
+ if$
+ }
+ { "" }
+ if$
+}
+
+FUNCTION {format.journal.number}
+{ number empty$ not
+ { "\penalty0 (" number * ")" * }
+ { "" }
+ if$
+}
+
+FUNCTION {format.journal.pages}
+{ pages empty$
+ { "" }
+ { ":\penalty0 " pages hyphenate * }
+ if$
+}
+
+FUNCTION {format.periodical.year.volume.number}
+{ year empty$ not
+ { year extract.before.dash }
+ { "empty year in periodical " cite$ * warning$ }
+ if$
+ volume empty$ not
+ { ", " * volume extract.before.dash * }
+ 'skip$
+ if$
+ number empty$ not
+ { "\penalty0 (" * number extract.before.dash * ")" * }
+ 'skip$
+ if$
+ year contains.dash
+ { "--" *
+ year extract.after.dash empty$
+ volume extract.after.dash empty$ and
+ number extract.after.dash empty$ and not
+ { year extract.after.dash empty$ not
+ { year extract.after.dash * }
+ { year extract.before.dash * }
+ if$
+ volume empty$ not
+ { ", " * volume extract.after.dash * }
+ 'skip$
+ if$
+ number empty$ not
+ { "\penalty0 (" * number extract.after.dash * ")" * }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {check.url}
+{ url empty$ not
+ { "\url{" url * "}" * 'entry.url :=
+ #1 'entry.is.electronic :=
+ }
+ { howpublished empty$ not
+ { howpublished #1 #5 substring$ "\url{" =
+ { howpublished 'entry.url :=
+ #1 'entry.is.electronic :=
+ }
+ 'skip$
+ if$
+ }
+ { note empty$ not
+ { note #1 #5 substring$ "\url{" =
+ { note 'entry.url :=
+ #1 'entry.is.electronic :=
+ }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.url}
+{ entry.url empty$ not
+ { new.block entry.url }
+ { "" }
+ if$
+}
+
+FUNCTION {check.doi}
+{ doi empty$ not
+ { #1 'entry.is.electronic := }
+ 'skip$
+ if$
+}
+
+FUNCTION {is.in.url}
+{ 's :=
+ s empty$
+ { #1 }
+ { entry.url empty$
+ { #0 }
+ { s text.length$ 'len :=
+ entry.url text.length$ 'charptr :=
+ { entry.url charptr len substring$ s = not
+ charptr #0 >
+ and
+ }
+ { charptr #1 - 'charptr := }
+ while$
+ charptr
+ }
+ if$
+ }
+ if$
+}
+
+FUNCTION {format.doi}
+{ ""
+ doi empty$ not show.doi and
+ { "" 's :=
+ doi 't :=
+ #0 'numnames :=
+ { t empty$ not}
+ { t #1 #1 substring$ 'tmp.str :=
+ tmp.str "," = tmp.str " " = or t #2 #1 substring$ empty$ or
+ { t #2 #1 substring$ empty$
+ { s tmp.str * 's := }
+ 'skip$
+ if$
+ s empty$ s is.in.url or
+ 'skip$
+ { numnames #1 + 'numnames :=
+ numnames #1 >
+ { ", " * }
+ { "DOI: " * }
+ if$
+ "\doi{" s * "}" * *
+ }
+ if$
+ "" 's :=
+ }
+ { s tmp.str * 's := }
+ if$
+ t #2 global.max$ substring$ 't :=
+ }
+ while$
+ 's :=
+ s empty$ not
+ { new.block s }
+ { "" }
+ if$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {check.electronic}
+{ "" 'entry.url :=
+ #0 'entry.is.electronic :=
+ 'check.doi
+ 'skip$
+ if$
+ 'check.url
+ 'skip$
+ if$
+ medium empty$ not
+ { medium "MT" = medium "DK" = or medium "CD" = or medium "OL" = or
+ { #1 'entry.is.electronic := }
+ 'skip$
+ if$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {format.note}
+{ note empty$ not show.note and
+ { note }
+ { "" }
+ if$
+}
+
+FUNCTION {empty.misc.check}
+{ author empty$ title empty$
+ year empty$
+ and and
+ key empty$ not and
+ { "all relevant fields are empty in " cite$ * warning$ }
+ 'skip$
+ if$
+}
+
+FUNCTION {monograph}
+{ output.bibitem
+ author empty$ not
+ { format.authors }
+ { editor empty$ not
+ { format.editors }
+ { "empty author and editor in " cite$ * warning$
+ ""
+ }
+ if$
+ }
+ if$
+ output
+ new.block
+ format.series.vol.num.title "title" output.check
+ "M" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.translators output
+ new.sentence
+ format.edition output
+ new.block
+ format.address.publisher output
+ format.year "year" output.check
+ format.pages bbl.colon output.after
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {incollection}
+{ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.title "title" output.check
+ "M" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.translators output
+ new.slash
+ format.editors output
+ new.block
+ format.series.vol.num.booktitle "booktitle" output.check
+ new.block
+ format.edition output
+ new.block
+ format.address.publisher output
+ format.year "year" output.check
+ format.pages bbl.colon output.after
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {periodical}
+{ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.title "title" output.check
+ "J" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.periodical.year.volume.number output
+ new.block
+ format.address.publisher output
+ format.date "year" output.check
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {article}
+{ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.title "title" output.check
+ "J" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.journal "journal" output.check
+ format.date "year" output.check
+ format.journal.volume output
+ format.journal.number "" output.after
+ format.journal.pages "" output.after
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {patent}
+{ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.title "title" output.check
+ "P" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.date "year" output.check
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {electronic}
+{ #1 #1 check.electronic
+ #1 'entry.is.electronic :=
+ output.bibitem
+ format.authors output
+ author format.key output
+ new.block
+ format.series.vol.num.title "title" output.check
+ "EB" set.entry.mark
+ format.mark "" output.after
+ new.block
+ format.address.publisher output
+ date empty$
+ { format.date output }
+ 'skip$
+ if$
+ format.pages bbl.colon output.after
+ format.editdate "" output.after
+ format.urldate "" output.after
+ format.url output
+ format.doi output
+ new.block
+ format.note output
+ fin.entry
+}
+
+FUNCTION {misc}
+{ journal empty$ not
+ 'article
+ { booktitle empty$ not
+ 'incollection
+ { publisher empty$ not
+ 'monograph
+ { entry.is.electronic
+ 'electronic
+ { "Z" set.entry.mark
+ monograph
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ }
+ if$
+ empty.misc.check
+}
+
+FUNCTION {archive}
+{ "A" set.entry.mark
+ misc
+}
+
+FUNCTION {book} { monograph }
+
+FUNCTION {booklet} { book }
+
+FUNCTION {collection}
+{ "G" set.entry.mark
+ monograph
+}
+
+FUNCTION {database}
+{ "DB" set.entry.mark
+ electronic
+}
+
+FUNCTION {dataset}
+{ "DS" set.entry.mark
+ electronic
+}
+
+FUNCTION {inbook} { book }
+
+FUNCTION {inproceedings}
+{ "C" set.entry.mark
+ incollection
+}
+
+FUNCTION {conference} { inproceedings }
+
+FUNCTION {map}
+{ "CM" set.entry.mark
+ misc
+}
+
+FUNCTION {manual} { monograph }
+
+FUNCTION {mastersthesis}
+{ "D" set.entry.mark
+ monograph
+}
+
+FUNCTION {newspaper}
+{ "N" set.entry.mark
+ article
+}
+
+FUNCTION {online}
+{ "EB" set.entry.mark
+ electronic
+}
+
+FUNCTION {phdthesis} { mastersthesis }
+
+FUNCTION {proceedings}
+{ "C" set.entry.mark
+ monograph
+}
+
+FUNCTION {software}
+{ "CP" set.entry.mark
+ electronic
+}
+
+FUNCTION {standard}
+{ "S" set.entry.mark
+ misc
+}
+
+FUNCTION {techreport}
+{ "R" set.entry.mark
+ misc
+}
+
+FUNCTION {unpublished}
+{ "Z" set.entry.mark
+ misc
+}
+
+FUNCTION {default.type} { misc }
+
+MACRO {jan} {"January"}
+
+MACRO {feb} {"February"}
+
+MACRO {mar} {"March"}
+
+MACRO {apr} {"April"}
+
+MACRO {may} {"May"}
+
+MACRO {jun} {"June"}
+
+MACRO {jul} {"July"}
+
+MACRO {aug} {"August"}
+
+MACRO {sep} {"September"}
+
+MACRO {oct} {"October"}
+
+MACRO {nov} {"November"}
+
+MACRO {dec} {"December"}
+
+MACRO {acmcs} {"ACM Computing Surveys"}
+
+MACRO {acta} {"Acta Informatica"}
+
+MACRO {cacm} {"Communications of the ACM"}
+
+MACRO {ibmjrd} {"IBM Journal of Research and Development"}
+
+MACRO {ibmsj} {"IBM Systems Journal"}
+
+MACRO {ieeese} {"IEEE Transactions on Software Engineering"}
+
+MACRO {ieeetc} {"IEEE Transactions on Computers"}
+
+MACRO {ieeetcad}
+ {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"}
+
+MACRO {ipl} {"Information Processing Letters"}
+
+MACRO {jacm} {"Journal of the ACM"}
+
+MACRO {jcss} {"Journal of Computer and System Sciences"}
+
+MACRO {scp} {"Science of Computer Programming"}
+
+MACRO {sicomp} {"SIAM Journal on Computing"}
+
+MACRO {tocs} {"ACM Transactions on Computer Systems"}
+
+MACRO {tods} {"ACM Transactions on Database Systems"}
+
+MACRO {tog} {"ACM Transactions on Graphics"}
+
+MACRO {toms} {"ACM Transactions on Mathematical Software"}
+
+MACRO {toois} {"ACM Transactions on Office Information Systems"}
+
+MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"}
+
+MACRO {tcs} {"Theoretical Computer Science"}
+
+FUNCTION {sortify}
+{ purify$
+ "l" change.case$
+}
+
+FUNCTION {chop.word}
+{ 's :=
+ 'len :=
+ s #1 len substring$ =
+ { s len #1 + global.max$ substring$ }
+ 's
+ if$
+}
+
+FUNCTION {format.lab.name}
+{ "{vv~}{ll}{, jj}{, ff}" format.name$
+ duplicate$ get.str.lang 'name.lang :=
+ name.lang lang.zh = name.lang lang.ja = or
+ { #1 "{ll}{ff}" format.name$ }
+ { #1 "{vv~}{ll}" format.name$ }
+ if$
+}
+
+FUNCTION {format.lab.names}
+{ 's :=
+ s #1 format.lab.name
+ s num.names$ #2 = name.lang lang.en = name.lang lang.ru = or and
+ { s #2 format.lab.name 't :=
+ t "others" =
+ { bbl.space * citation.et.al * }
+ { " " * citation.and * " " * t * }
+ if$
+ }
+ { s num.names$ #1 >
+ { bbl.space * citation.et.al * }
+ 'skip$
+ if$
+ }
+ if$
+}
+
+FUNCTION {author.key.label}
+{ author empty$
+ { key empty$
+ { cite$ #1 #3 substring$ }
+ 'key
+ if$
+ }
+ { author format.lab.names }
+ if$
+}
+
+FUNCTION {author.editor.key.label}
+{ author empty$
+ { editor empty$
+ { key empty$
+ { cite$ #1 #3 substring$ }
+ 'key
+ if$
+ }
+ { editor format.lab.names }
+ if$
+ }
+ { author format.lab.names }
+ if$
+}
+
+FUNCTION {author.key.organization.label} % label from author, else key, else organization, else cite-key prefix
+{ author empty$
+ { key empty$
+ { organization empty$
+ { cite$ #1 #3 substring$ }
+ { "The " #4 organization chop.word #3 text.prefix$ } % drop a leading "The " (4 chars), keep first 3 text chars
+ if$
+ }
+ 'key
+ if$
+ }
+ { author format.lab.names }
+ if$
+}
+
+FUNCTION {editor.key.organization.label} % same as author.key.organization.label but starting from editor
+{ editor empty$
+ { key empty$
+ { organization empty$
+ { cite$ #1 #3 substring$ }
+ { "The " #4 organization chop.word #3 text.prefix$ } % drop a leading "The " (4 chars), keep first 3 text chars
+ if$
+ }
+ 'key
+ if$
+ }
+ { editor format.lab.names }
+ if$
+}
+
+FUNCTION {calc.short.authors} % choose the label builder by entry type; result goes into short.list
+{ type$ "book" =
+ type$ "inbook" =
+ or
+ 'author.editor.key.label % book/inbook: editor may substitute for author
+ { type$ "collection" =
+ type$ "proceedings" =
+ or
+ { editor empty$ not
+ 'editor.key.organization.label % collection/proceedings: prefer editor, else organization
+ 'author.key.organization.label
+ if$
+ }
+ 'author.key.label % all other types: author only
+ if$
+ }
+ if$
+ 'short.list :=
+}
+
+FUNCTION {calc.label} % build the author-year disambiguation label: short.list + "(" + year
+{ calc.short.authors
+ short.list
+ "("
+ *
+ format.year duplicate$ empty$ % format.year is defined earlier in this style file
+ short.list key field.or.null = or % no year, or the "authors" part is really just the key field?
+ { pop$ "" } % then omit the year from the label
+ 'skip$
+ if$
+ *
+ 'label :=
+}
+
+INTEGERS { seq.num } % running counter of entries in citation (input) order
+
+FUNCTION {init.seq}
+{ #0 'seq.num :=} % reset the counter before the presort pass
+
+FUNCTION {int.to.fix} % stack: int -> 10-char zero-padded string, for fixed-width sort keys
+{ "000000000" swap$ int.to.str$ * % prepend nine zeros to the decimal form
+ #-1 #10 substring$ % keep the last 10 characters
+}
+
+FUNCTION {presort} % per-entry pass: build "label seqnum" sort key for the label-disambiguation SORT
+{ set.entry.lang % these four helpers are defined earlier in this style file
+ set.entry.numbered
+ show.url show.doi check.electronic
+ calc.label
+ label sortify % normalized label first, so equal labels become adjacent
+ " "
+ *
+ seq.num #1 + 'seq.num :=
+ seq.num int.to.fix % zero-padded citation-order number
+ 'sort.label := % sort.label keeps just the number, used later to restore input order
+ sort.label *
+ #1 entry.max$ substring$ % truncate to BibTeX's entry string limit
+ 'sort.key$ :=
+}
+
+STRINGS { longest.label last.label next.extra } % state for the year-suffix (a, b, ...) disambiguation passes
+
+INTEGERS { longest.label.width last.extra.num number.label }
+
+FUNCTION {initialize.longest.label}
+{ "" 'longest.label :=
+ #0 int.to.chr$ 'last.label := % sentinel that cannot equal any real label
+ "" 'next.extra :=
+ #0 'longest.label.width :=
+ #0 'last.extra.num :=
+ #0 'number.label := % total entry count, used by begin.bib for the thebibliography width
+}
+
+FUNCTION {forward.pass} % entries are label-sorted here: give repeated labels suffixes "b", "c", ...
+{ last.label label =
+ { last.extra.num #1 + 'last.extra.num := % same label as previous entry: advance the letter
+ last.extra.num int.to.chr$ 'extra.label :=
+ }
+ { "a" chr.to.int$ 'last.extra.num := % new label: restart at "a" (the "a" itself is set in reverse.pass)
+ "" 'extra.label :=
+ label 'last.label :=
+ }
+ if$
+ number.label #1 + 'number.label := % count entries for begin.bib
+}
+
+FUNCTION {reverse.pass} % walking backwards: first entry of a duplicate group gets "a"; wrap suffix in \natexlab
+{ next.extra "b" =
+ { "a" 'extra.label := } % the entry before a "b" must be "a"
+ 'skip$
+ if$
+ extra.label 'next.extra :=
+ extra.label
+ duplicate$ empty$
+ 'skip$
+ { "{\natexlab{" swap$ * "}}" * } % non-empty suffix: emit {\natexlab{x}}
+ if$
+ 'extra.label :=
+ label extra.label * 'label := % final label = author(year + suffix
+}
+
+FUNCTION {bib.sort.order} % second SORT key: the padded sequence number alone, restoring citation order
+{ sort.label 'sort.key$ :=
+}
+
+FUNCTION {begin.bib} % write the .bbl prologue: preamble, \begin{thebibliography}, fallback macro definitions
+{ preamble$ empty$
+ 'skip$
+ { preamble$ write$ newline$ }
+ if$
+ "\begin{thebibliography}{" number.label int.to.str$ * "}" * % widest-label argument = entry count
+ write$ newline$
+ "\providecommand{\natexlab}[1]{#1}" % no-op fallback when natbib is not loaded
+ write$ newline$
+ "\providecommand{\url}[1]{#1}" % fallback when no url package is loaded
+ write$ newline$
+ "\expandafter\ifx\csname urlstyle\endcsname\relax\relax\else"
+ write$ newline$
+ " \urlstyle{same}\fi" % if \urlstyle exists, render URLs in the text font
+ write$ newline$
+ show.doi % config flag set elsewhere in this style file
+ { "\providecommand{\href}[2]{\url{#2}}"
+ write$ newline$
+ "\providecommand{\doi}[1]{\href{https://doi.org/#1}{#1}}" % DOI rendered as a hyperlink when possible
+ write$ newline$
+ }
+ 'skip$
+ if$
+}
+
+FUNCTION {end.bib} % close the .bbl file
+{ newline$
+ "\end{thebibliography}" write$ newline$
+}
+
+READ % read the .bib database entries
+
+EXECUTE {init.state.consts}
+
+EXECUTE {load.config} % defined earlier; sets flags such as show.doi
+
+EXECUTE {init.seq}
+
+ITERATE {presort} % record citation order and build label-based sort keys
+
+SORT % group identical author-year labels together
+
+EXECUTE {initialize.longest.label}
+
+ITERATE {forward.pass} % assign "b", "c", ... to later duplicates
+
+REVERSE {reverse.pass} % assign "a" to the first of each duplicate group, wrap in \natexlab
+
+ITERATE {bib.sort.order} % switch the sort key back to citation order
+
+SORT % restore original (unsrt) order for output
+
+EXECUTE {begin.bib}
+
+ITERATE {call.type$} % format each entry via its type function
+
+EXECUTE {end.bib}
diff --git a/Biblio/library.bib b/Biblio/library.bib
new file mode 100644
index 0000000..4983890
--- /dev/null
+++ b/Biblio/library.bib
@@ -0,0 +1,599 @@
+Automatically generated by Mendeley Desktop 1.19
+Any changes to this file will be lost if it is regenerated by Mendeley.
+
+BibTeX export options can be customized via Options -> BibTeX in Mendeley Desktop
+
+@article{Spring2000,
+abstract = {We present a technique for identifying repetitive information transfers and use it to analyze the redundancy of network traffic. Our insight is that dynamic content, streaming media and other traffic that is not caught by today's Web caches is nonetheless likely to derive from similar information. We have therefore adapted similarity detection techniques to the problem of designing a system to eliminate redundant transfers. We identify repeated byte ranges between packets to avoid retransmitting the redundant {\{}data.We{\}} find a high level of redundancy and are able to detect repetition that Web proxy caches are not. In our traces, after Web proxy caching has been applied, an additional 39{\%} of the original volume of Web traffic is found to be redundant. Moreover, because our technique makes no assumptions about {\{}HTTP{\}} protocol syntax or caching semantics, it provides immediate benefits for other types of content, such as streaming media, {\{}FTP{\}} traffic, news and mail.},
+author = {Spring, Neil T. and Wetherall, David},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Spring, Wetherall - 2000 - A protocol-independent technique for eliminating redundant network traffic.pdf:pdf},
+isbn = {1581132247},
+issn = {01464833},
+journal = {Sigcomm},
+number = {4},
+pages = {87--95},
+title = {{A protocol-independent technique for eliminating redundant network traffic}},
+volume = {30},
+year = {2000}
+}
+@inproceedings{zheng2018hashing,
+ title={Hashing Incomplete and Unordered Network Streams},
+ author={Zheng, Chao and Li, Xiang and Liu, Qingyun and Sun, Yong and Fang, Binxing},
+ booktitle={IFIP International Conference on Digital Forensics},
+ pages={199--224},
+ year={2018},
+ organization={Springer}
+}
+@article{Li2017,
+author = {Li, Xiang and Zheng, Chao and Zhang, Chengwei and Li, Shu and Guo, Li and Xu, Jie},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Li et al. - 2017 - AppTwins A new approach to identify app package in network traffic.pdf:pdf},
+isbn = {9781509042432},
+journal = {2017 8th International Conference on Information and Communication Systems, ICICS 2017},
+keywords = {app package,high concurrency,identify method,incomplete capture,network traffic},
+mendeley-groups = {network redundancy},
+pages = {222--227},
+title = {{AppTwins: A new approach to identify app package in network traffic}},
+year = {2017}
+}
+@article{Wang2014,
+abstract = {Feature selection is an important technique for data mining. Despite its importance, most studies of feature selection are restricted to batch learning. Unlike traditional batch learning methods, online learning represents a promising family of efficient and scalable machine learning algorithms for large-scale applications. Most existing studies of online learning require accessing all the attributes/features of training instances. Such a classical setting is not always appropriate for real-world applications when data instances are of high dimensionality or it is expensive to acquire the full set of attributes/features. To address this limitation, we investigate the problem of online feature selection (OFS) in which an online learner is only allowed to maintain a classifier involved only a small and fixed number of features. The key challenge of online feature selection is how to make accurate prediction for an instance using a small number of active features. This is in contrast to the classical setup of online learning where all the features can be used for prediction. We attempt to tackle this challenge by studying sparsity regularization and truncation techniques. Specifically, this article addresses two different tasks of online feature selection: 1) learning with full input, where an learner is allowed to access all the features to decide the subset of active features, and 2) learning with partial input, where only a limited number of features is allowed to be accessed for each instance by the learner. We present novel algorithms to solve each of the two problems and give their performance analysis. We evaluate the performance of the proposed algorithms for online feature selection on several public data sets, and demonstrate their applications to real-world problems including image classification in computer vision and microarray gene expression analysis in bioinformatics. 
The encouraging results of our experiments validate the efficacy and efficiency of the proposed techniques.},
+author = {Wang, Jialei and Zhao, Peilin and Hoi, Steven C.H. and Jin, Rong},
+file = {:C$\backslash$:/论文/feature selection/10.1.1.714.1933.pdf:pdf},
+isbn = {1041-4347},
+issn = {10414347},
+journal = {IEEE Transactions on Knowledge and Data Engineering},
+keywords = {Feature selection,big data analytics,classification,large-scale data mining,online learning},
+number = {3},
+pages = {698--710},
+title = {{Online feature selection and its applications}},
+volume = {26},
+year = {2014}
+}
+@article{Lei2013Cooperative,
+ title={Cooperative end-to-end traffic redundancy elimination for reducing cloud bandwidth cost},
+ author={Lei, Yu and Sapra, K. and Shen, Haiying and Lin, Ye},
+ booktitle={IEEE International Conference on Network Protocols},
+ year={2013},
+}
+@article{Aggarwal2010EndRE,
+ title={EndRE: An End-System Redundancy Elimination Service for Enterprises.},
+ author={Aggarwal, Bhavish and Akella, Aditya and Anand, Ashok and Balachandran, Athula and Chitnis, Pushkar and Muthukrishnan, Chitra and Ramjee, Ramachandran and Varghese, George},
+ booktitle={Usenix Symposium on Networked Systems Design & Implementation},
+ year={2010},
+}
+
+@article{Zohar2014,
+abstract = {In this paper, we present PACK (Predictive ACKs), a novel end-to-end traffic redundancy elimination (TRE) system, designed for cloud computing customers. Cloud-based TRE needs to apply a judicious use of cloud resources so that the bandwidth cost reduction combined with the additional cost of TRE computation and storage would be optimized. PACK's main advantage is its capability of offloading the cloud-server TRE effort to end-clients, thus minimizing the processing costs induced by the TRE algorithm. Unlike previous solutions, PACK does not require the server to continuously maintain clients' status. This makes PACK very suitable for pervasive computation environments that combine client mobility and server migration to maintain cloud elasticity. PACK is based on a novel TRE technique, which allows the client to use newly received chunks to identify previously received chunk chains, which in turn can be used as reliable predictors to future transmitted chunks. We present a fully functional PACK implementation, transparent to all TCP-based applications and network devices. Finally, we analyze PACK benefits for cloud users, using traffic traces from various sources. {\textcopyright} 2013 IEEE.},
+archivePrefix = {arXiv},
+arxivId = {arXiv:1505.00810v1},
+author = {Zohar, Eyal and Cidon, Israel and Mokryn, Osnat},
+eprint = {arXiv:1505.00810v1},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Zohar, Cidon, Mokryn - 2014 - PACK Prediction-based cloud bandwidth and cost reduction system.pdf:pdf},
+isbn = {0090-6778},
+issn = {10636692},
+journal = {IEEE/ACM Transactions on Networking},
+keywords = {Caching,Cloud computing,Network optimization,Traffic redundancy elimination},
+number = {1},
+pages = {39--51},
+title = {{PACK: Prediction-based cloud bandwidth and cost reduction system}},
+volume = {22},
+year = {2014}
+}
+@article{Yu,
+author = {Yu, Lei and Sapra, Karan and Shen, Haiying and Ye, Lin},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Yu et al. - Unknown - Cooperative End-to-End Tra ffi c Redundancy Elimination for Reducing Cloud Bandwidth Cost.pdf:pdf},
+isbn = {9781467324472},
+title = {{Cooperative End-to-End Traffic Redundancy Elimination for Reducing Cloud Bandwidth Cost}}
+}
+
+@article{Anand2009,
+abstract = {A large amount of popular content is transferred repeatedly across network links in the Internet. In recent years, protocol-independent redundancy elimination, which can remove duplicate strings from within arbitrary network flows, has emerged as a powerful technique to improve the efficiency of network links in the face of repeated data. Many vendors offer such redundancy elimination middleboxes to improve the effective bandwidth of enterprise, data center and ISP links alike. In this paper, we conduct a large scale trace-driven study of protocol independent redundancy elimination mechanisms, driven by several terabytes of packet payload traces collected at 12 distinct network locations, including the access link of a large US-based university and of 11 enterprise networks of different sizes. Based on extensive analysis, we present a number of findings on the benefits and fundamental design issues in redundancy elimination systems. Two of our key findings are (1) A new redundancy elimination algorithm based on Winnowing that outperforms the widely-used Rabin fingerprint-based algorithm by 5-10{\%} on most traces and by as much as 35{\%} in some traces. (2) A surprising finding that 75-90{\%} of middlebox's bandwidth savings in our enterprise traces is due to redundant byte-strings from within each client's traffic, implying that pushing redundancy elimination capability to the end hosts, i.e. an end-to-end redundancy elimination solution, could obtain most of the middlebox's bandwidth savings.},
+author = {Anand, Ashok and Muthukrishnan, Chitra and Akella, Aditya and Ramjee, Ramachandran},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Anand et al. - 2009 - Redundancy in network traffic findings and implications.pdf:pdf},
+isbn = {9781605585116},
+journal = {Proceedings of SIGMETRICS/Performance'09},
+keywords = {traffic engineering,traffic redundancy},
+pages = {37--48},
+title = {{Redundancy in network traffic: findings and implications}},
+url = {http://portal.acm.org/citation.cfm?id=1555355},
+year = {2009}
+}
+
+
+@article{Aggarwal,
+annote = {随着网络内容的加密化,以及原有RE无法对移动设备的最后一跳做优化,将原RE的两个中间件中的一个放在客户端,其也提出了一个非对称的方式,将大量的处理和内存使用放在客户端,同时提出了一种新的模糊哈希算法和数据结构来提高处理速度和降低缓存。},
+author = {Aggarwal, Bhavish and Akella, Aditya and Anand, Ashok and Balachandran, Athula},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Aggarwal et al. - Unknown - EndRE An End-System Redundancy Elimination Service for Enterprises.pdf:pdf},
+title = {{EndRE: An End-System Redundancy Elimination Service for Enterprises}}
+}
+@article{Feng2013How,
+  title={How to Reduce Smartphone Traffic Volume by 30\%?},
+ author={Feng, Qian and Huang, Junxian and Erman, Jeffrey and Mao, Z. Morley and Sen, Subhabrata and Spatscheck, Oliver},
+ booktitle={International Conference on Passive & Active Measurement},
+ year={2013},
+}
+
+@article{Store2013,
+author = {Store, A Receiver Chunk and Algorithm, B Receiver},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Store, Algorithm - 2013 - Advanced Prediction-Based System for Cloud Bandwidth and Cost Reduction.pdf:pdf},
+keywords = {caching,cloud computing,network optimization,redundancy elimination,traffic},
+pages = {224--226},
+title = {{Advanced Prediction-Based System for Cloud Bandwidth and Cost Reduction}},
+volume = {8491},
+year = {2013}
+}
+@article{Mohammad2013,
+abstract = {Such Dataset have been collected using our own tool, in the attached pdf document you can find details of the dataset and the features in these datasets
+
+All datasets were collected using our own tool (Mohammad, McCluskey, {\&} Thabtah, 2012), and based on the extraction rules suggested in this article.},
+author = {Mohammad, Rami M and Thabtah, Fadi and McCluskey, Lee},
+file = {:C$\backslash$:/论文/feature selection/PhishingWebsitesFeatures.pdf:pdf},
+journal = {Ieee},
+number = {May},
+pages = {1--7},
+title = {{Phishing Websites Features}},
+url = {papers3://publication/uuid/6A553382-D05D-48FA-97AA-382C3203BB1F},
+year = {2013}
+}
+@article{Xiao2017,
+abstract = {TUX2 is a new distributed graph engine that bridges graph computation and distributed machine learning. TUX2 inherits the benefits of an elegant graph computation model, efficient graph layout, and balanced parallelism to scale to billion-edge graphs; we extend and optimize it for distributed machine learning to support heterogeneity, a Stale Synchronous Parallel model, and a new MEGA (Mini-batch, Exchange, GlobalSync, and Apply) model. We have developed a set of representative distributed machine learning algorithms in TUX2, covering both supervised and unsupervised learning. Compared to implementations on distributed machine learning platforms, writing these algorithms in TUX2 takes only about 25{\%} of the code: Our graph computation model hides the detailed management of data layout, partitioning, and parallelism from developers. Our extensive evaluation of TUX2, using large data sets with up to 64 billion edges, shows that TUX2 outperforms state-of-the-art distributed graph engines PowerGraph and PowerLyra by an order of magnitude, while beating two state-of-the-art distributed machine learning systems by at least 48{\%}.},
+author = {Xiao, Wencong and Xue, Jilong and Miao, Youshan and Li, Zhen and Chen, Cheng and Wu, Ming and Li, Wei and Zhou, Lidong},
+file = {:C$\backslash$:/论文/yingship/nsdi17-xiao.pdf:pdf},
+isbn = {978-1-931971-37-9},
+journal = {Nsdi},
+pages = {669--682},
+title = {{Tux2: Distributed Graph Computation for Machine Learning}},
+url = {https://www.usenix.org/conference/nsdi17/technical-sessions/presentation/xiao},
+year = {2017}
+}
+@article{aoli2010,
+ title={重复数据删除技术},
+ author={敖莉 and 舒继武 and 李明强},
+ journal={软件学报},
+ volume={21},
+ number={5},
+  pages={916--929},
+ year={2010},
+}
+@article{Sahoo2017,
+abstract = {Malicious URL, a.k.a. malicious website, is a common and serious threat to cybersecurity. Malicious URLs host unsolicited content (spam, phishing, drive-by exploits, etc.) and lure unsuspecting users to become victims of scams (monetary loss, theft of private information, and malware installation), and cause losses of billions of dollars every year. It is imperative to detect and act on such threats in a timely manner. Traditionally, this detection is done mostly through the usage of blacklists. However, blacklists cannot be exhaustive, and lack the ability to detect newly generated malicious URLs. To improve the generality of malicious URL detectors, machine learning techniques have been explored with increasing attention in recent years. This article aims to provide a comprehensive survey and a structural understanding of Malicious URL Detection techniques using machine learning. We present the formal formulation of Malicious URL Detection as a machine learning task, and categorize and review the contributions of literature studies that addresses different dimensions of this problem (feature representation, algorithm design, etc.). Further, this article provides a timely and comprehensive survey for a range of different audiences, not only for machine learning researchers and engineers in academia, but also for professionals and practitioners in cybersecurity industry, to help them understand the state of the art and facilitate their own research and practical applications. We also discuss practical issues in system design, open research challenges, and point out some important directions for future research.},
+annote = {恶意url检测综述},
+archivePrefix = {arXiv},
+arxivId = {1701.07179},
+author = {Sahoo, Doyen and Liu, Chenghao and Hoi, Steven C. H.},
+eprint = {1701.07179},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sahoo, Liu, Hoi - 2017 - Malicious URL Detection using Machine Learning A Survey.pdf:pdf},
+pages = {1--21},
+title = {{Malicious URL Detection using Machine Learning: A Survey}},
+url = {http://arxiv.org/abs/1701.07179},
+year = {2017}
+}
+@article{Sahoo2017Malicious,
+ title={Malicious URL Detection using Machine Learning: A Survey},
+ author={Sahoo, Doyen and Liu, Chenghao and Hoi, Steven C. H.},
+ year={2017},
+}
+@article{Tan2018,
+author = {Tan, Guolin and Zhang, Peng and Liu, Qingyun and Liu, Xinran and Zhu, Chunge and Dou, Fenghu},
+file = {:C$\backslash$:/论文/恶意url检测/08455975.pdf:pdf},
+isbn = {9781538643877},
+journal = {Proceedings - 17th IEEE International Conference on Trust, Security and Privacy in Computing and Communications and 12th IEEE International Conference on Big Data Science and Engineering, Trustcom/BigDataSE 2018},
+keywords = {URL,adaptive learning,concept drift,detection,malicious,traffic},
+mendeley-groups = {url},
+pages = {737--743},
+publisher = {IEEE},
+title = {{Adaptive Malicious URL Detection: Learning in the Presence of Concept Drifts}},
+year = {2018}
+}
+@inproceedings{Chen2008Efficient,
+ title={Efficient Merging and Filtering Algorithms for Approximate String Searches},
+ author={Chen, Li and Lu, Jiaheng and Lu, Yiming},
+ booktitle={IEEE International Conference on Data Engineering},
+ year={2008},
+}
+@article{Lumezanu2010,
+abstract = {Network-level redundancy elimination (RE) algorithms re- duce traffic volume on bandwidth-constrained network paths by avoiding the transmission of repeated byte sequences. Previous work shows that RE can suppress the transmis- sion of 20-50{\%} bytes when deployed at ISP access links or between routers. In this paper, we focus on the challenges of deploying RE in cellular networks. The potential benefit is substantial, since cellular networks have a growing sub- scriber base and network links, including wired backhaul, are often oversubscribed. Using three large traces captured at two North American and one European wireless network providers, we show that RE can reduce the bandwidth con- sumption of the majority of mobile users by at least 10{\%}. However, cellular links have much higher packet loss rates than their wired counterparts, which makes applying RE much more difficult. Our experiments also show that the loss of only a few packets can disrupt RE and eliminate the bandwidth savings. We propose informed marking, a lightweight scheme that detects lost packets and prevents RE algorithms from using them for future encodings. We implement RE with informed marking and deploy it in a real-world cellular network. Our results show that with in- formed marking, more than 60{\%} of the bandwidth savings of RE are preserved, even when packet loss rates are high.},
+author = {Lumezanu, Cristian and Guo, Katherine and Spring, Neil and Bhattacharjee, Bobby},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Lumezanu et al. - 2010 - The effect of packet loss on redundancy elimination in cellular wireless networks.pdf:pdf},
+isbn = {9781450304832},
+journal = {Proceedings of ACM IMC},
+keywords = {cellular networks,loss,redundancy elimination},
+number = {January},
+pages = {294},
+title = {{The effect of packet loss on redundancy elimination in cellular wireless networks}},
+url = {http://dl.acm.org/citation.cfm?id=1879141.1879179},
+year = {2010}
+}
+@article{Ge2014,
+abstract = {The resources of dedicated accelerators (e.g. FPGA) are still required to bridge the gap between software-based Middleboxs(MBs) and the commodity hardware. To consolidate various hardware resources in an elastic, programmable and reconfigurable manner, we design and build a flexible and consolidated framework, OpenANFV, to support virtualized accelerators for MBs in the cloud environment. OpenANFV is seamlessly and efficiently put into Openstack to provide high performance on top of commodity hardware to cope with various virtual function requirements. OpenANFV works as an independent component to manage and virtualize the acceleration resources (e.g. cinder manages block storage resources and nova manages computing resources). Specially, OpenANFV mainly has the following three features. (1)Automated Management. Provisioning for multiple Virtualized Network Functions (VNFs) is automated to meet the dynamic requirements of NFV environment. Such automation alleviates the time pressure of the complicated provisioning and configuration as well as reduces the probability of manually induced configuration errors. (2) Elasticity. VNFs are created, migrated, and destroyed on demand in real time. The reconfigurable hardware resources in pool can rapidly and flexibly offload the corresponding services to the accelerator platform in the dynamic NFV environment. (3) Coordinating with Openstack. The design and implementation of the OpenANFV APIs coordinate with the mechanisms in Openstack to support required virtualized MBs for multiple tenants.},
+author = {Ge, Xiongzi and Liu, Yi and Du, David H.C. and Zhang, Liang and Guan, Hongguang and Chen, Jian and Zhao, Yuping and Hu, Xinyu},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Ge et al. - 2014 - OpenANFV.pdf:pdf},
+isbn = {9781450328364},
+issn = {01464833},
+journal = {Proceedings of the 2014 ACM conference on SIGCOMM - SIGCOMM '14},
+keywords = {middlebox,network function virtualization,openstack},
+pages = {353--354},
+title = {{OpenANFV}},
+url = {http://dl.acm.org/citation.cfm?doid=2619239.2631426},
+year = {2014}
+}
+@article{Yu2004a,
+abstract = {Feature selection is applied to reduce the number of features in many applications where data has hundreds or thousands of features. Existing feature selection methods mainly focus on finding relevant features. In this paper, we show that feature relevance alone is insufficient for efficient feature selection of high-dimensional data. We define feature redundancy and propose to perform explicit redundancy analysis in feature selection. A new framework is introduced that decouples relevance analysis and redundancy analysis. We develop a correlation-based method for relevance and redundancy analysis, and conduct an empirical study of its efficiency and effectiveness comparing with representative methods.},
+author = {Yu, Lei and Liu, Huan},
+file = {:C$\backslash$:/论文/feature selection/LiuH-PaperJMLR04.pdf:pdf},
+issn = {1532-4435},
+journal = {J. Mach. Learn. Res.},
+keywords = {feature selection,high dimensionality,redundancy,relevance,supervised learning},
+pages = {1205--1224},
+title = {{Efficient Feature Selection via Analysis of Relevance and Redundancy}},
+url = {http://dl.acm.org/citation.cfm?id=1005332.1044700},
+volume = {5},
+year = {2004}
+}
+@techreport{CNNIC2018,
+author = {CNNIC},
+title = {中国互联网络发展状况统计报告},
+url = {http://www.cac.gov.cn/2018-08/20/c\_1123296882.htm},
+year = {2018}
+}
+@techreport{PhenomenaReport,
+author = {sandvine},
+title = {2018-internet-phenomena-report},
+url = {https://www.sandvine.com/2018-internet-phenomena-report},
+year = {2018}
+}
+@techreport{CiscoReport,
+author = {cisco},
+title = {visual-networking-index-vni},
+url = {https://www.cisco.com/c/en/us/solutions/collateral/service-provider/visual-networking-index-vni/white-paper-c11-741490.html},
+year = {2018}
+}
+@article{Hao2017,
+abstract = {{\textcopyright} 1999-2012 IEEE. Near-duplicate video retrieval (NDVR) has been a significant research task in multimedia given its high impact in applications, such as video search, recommendation, and copyright protection. In addition to accurate retrieval performance, the exponential growth of online videos has imposed heavy demands on the efficiency and scalability of the existing systems. Aiming at improving both the retrieval accuracy and speed, we propose a novel stochastic multiview hashing algorithm to facilitate the construction of a large-scale NDVR system. Reliable mapping functions, which convert multiple types of keyframe features, enhanced by auxiliary information such as video-keyframe association and ground truth relevance to binary hash code strings, are learned by maximizing a mixture of the generalized retrieval precision and recall scores. A composite Kullback-Leibler divergence measure is used to approximate the retrieval scores, which aligns stochastically the neighborhood structures between the original feature and the relaxed hash code spaces. The efficiency and effectiveness of the proposed method are examined using two public near-duplicate video collections and are compared against various classical and state-of-the-art NDVR systems.},
+author = {Hao, Yanbin and Mu, Tingting and Hong, Richang and Wang, Meng and An, Ning and Goulermas, John Y.},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hao et al. - 2017 - Stochastic Multiview Hashing for Large-Scale Near-Duplicate Video Retrieval.pdf:pdf},
+issn = {15209210},
+journal = {IEEE Transactions on Multimedia},
+keywords = {Divergence,hashing,multiview learning,near-duplicate video retrieval (NDVR),semi-supervised learning},
+number = {1},
+pages = {1--14},
+title = {{Stochastic Multiview Hashing for Large-Scale Near-Duplicate Video Retrieval}},
+volume = {19},
+year = {2017}
+}
+@article{Sanadhya2012,
+abstract = {Network deduplication (dedup) is an attractive approach to improve network performance for mobile devices. With traditional deduplication, the dedup source uses only the portion of the cache at the dedup destination that it is aware of. We argue in this work that in a mobile environment, the dedup destination (say the mobile) could have accumulated a much larger cache than what the current dedup source is aware of. This can occur because of several reasons ranging from the mobile consuming content through heterogeneous wireless technologies, to the mobile moving across different wireless networks. In this context, we propose asymmetric caching, a solution that is overlaid on baseline network deduplication, but which allows the dedup destination to selectively feedback appropriate portions of its cache to the dedup source with the intent of improving the redundancy elimination efficiency. We show using traffic traces collected from 30 mobile users, that with asymmetric caching, over 89{\%} of the achievable redundancy can be identified and eliminated even when the dedup source has less than one hundredth of the cache size as the dedup destination. Further, we showthat the ratio of bytes saved from transmission at the dedup source because of asymmetric caching is over 6× that of the number of bytes sent as feedback. Finally, with a prototype implementation of asymmetric caching on both a Linux laptop and an Android smartphone, we demonstrate that the solution is deployable with reasonable CPU and memory overheads.},
+annote = {提出了一种非对称的缓存机制,dd-dst的缓存略大于dd-src的缓存,dd-src向dd-dst传输数据时,若发现数据与旧数据重复,dd-dst传回旧数据片段作为反馈,已达到减少数据传输冗余的效果。},
+author = {Sanadhya, Shruti and Sivakumar, Raghupathy and Kim, Kyu-Han and Congdon, Paul and Lakshmanan, Sriram and Singh, Jatinder Pal},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Sanadhya et al. - 2012 - Asymmetric Caching Improved Network Deduplication for Mobile Devices.pdf:pdf},
+isbn = {9781450311595},
+journal = {Mobicom},
+keywords = {asymmetric caching,bandwidth conservation,mobile devices,mobile networks,mobile traffic,network deduplication},
+pages = {161},
+title = {{Asymmetric Caching: Improved Network Deduplication for Mobile Devices}},
+url = {http://dl.acm.org/citation.cfm?id=2348565{\%}5Cnhttp://dl.acm.org/citation.cfm?doid=2348543.2348565},
+year = {2012}
+}
+@article{Rhea2003,
+author = {Rhea, Sean C and Liang, Kevin and Brewer, Eric},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Rhea, Liang, Brewer - 2003 - Value-Based Web Caching.pdf:pdf},
+isbn = {1581136803},
+keywords = {aliasing,caching,duplicate suppression,dynamic content,http,hypertext transfer protocol,privacy,proxy,redundant transfers,resource modification,scalability,world wide web,www},
+number = {May},
+title = {{Value-Based Web Caching}},
+year = {2003}
+}
+@article{Halepovic2012,
+author = {Halepovic, Emir and Ghaderi, Majid and Williamson, Carey},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Halepovic, Ghaderi, Williamson - 2012 - On the performance of redundant traffic elimination in WLANs.pdf:pdf},
+isbn = {9781457720529},
+issn = {15503607},
+journal = {IEEE International Conference on Communications},
+keywords = {Elimination,Measurement,Performance,Redundancy,Traffic,WLAN,Wireless},
+pages = {5434--5439},
+title = {{On the performance of redundant traffic elimination in WLANs}},
+year = {2012}
+}
+@article{Hao2016Stochastic,
+ title={Stochastic Multiview Hashing for Large-Scale Near-Duplicate Video Retrieval},
+ author={Hao, Yanbin and Mu, Tingting and Hong, Richang and Meng, Wang and Goulermas, John Y.},
+ journal={IEEE Transactions on Multimedia},
+ volume={PP},
+ number={99},
+ pages={1-1},
+ year={2016},
+}
+@inproceedings{Halepovic2013On,
+ title={On the performance of Redundant Traffic Elimination in WLANs},
+ author={Halepovic, Emir and Ghaderi, Majid and Williamson, Carey},
+ booktitle={IEEE International Conference on Communications},
+ year={2013},
+}
+@inproceedings{Lumezanu2010The,
+ title={The Effect of Packet Loss on Redundancy Elimination in Cellular Wireless Networks},
+ author={Lumezanu, Cristian and Guo, Katherine and Spring, Neil and Bhattacharjee, Bobby},
+ booktitle={Acm Sigcomm Conference on Internet Measurement},
+ year={2010},
+}
+@article{Feng,
+annote = {在客户端和服务器端增加了一个软件定义网络来降低高延迟对传输冗余的影响。},
+author = {Feng, Dan},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Feng - Unknown - Smart In-Network Deduplication for Storage-aware SDN.pdf:pdf},
+isbn = {9781450320566},
+keywords = {deduplication,resources of servers,software defined network,storage systems,transmission,while incurring heavy network},
+pages = {509--510},
+title = {{Smart In-Network Deduplication for Storage-aware SDN}}
+}
+@article{Hua2013,
+abstract = {Cloud computing applications face the challenges of dealing with a huge volume of data that needs the support of fast approximate queries to enhance system scalability and improve quality of service, especially when users are not aware of exact query inputs. Locality-Sensitive Hashing (LSH) can support the approximate queries that unfortunately suffer from imbalanced load and space inefficiency among distributed data servers, which severely limits the query accuracy and incurs long query latency between users and cloud servers. In this paper, we propose a novel scheme, called NEST, which offers ease-of-use and cost-effective approximate query service for cloud computing. The novelty of NEST is to leverage cuckoo-driven locality-sensitive hashing to find similar items that are further placed closely to obtain load-balancing buckets in hash tables. NEST hence carries out flat and manageable addressing in adjacent buckets, and obtains constant-scale query complexity even in the worst case. The benefits of NEST include the increments of space utilization and fast query response. Theoretical analysis and extensive experiments in a large-scale cloud testbed demonstrate the salient properties of NEST to meet the needs of approximate query service in cloud computing environments.},
+author = {Hua, Yu and Xiao, Bin and Liu, Xue},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Hua, Xiao, Liu - 2013 - NEST Locality-aware approximate query service for cloud computing.pdf:pdf},
+isbn = {9781467359467},
+issn = {0743166X},
+journal = {Proceedings - IEEE INFOCOM},
+pages = {1303--1311},
+title = {{NEST: Locality-aware approximate query service for cloud computing}},
+year = {2013}
+}
+@article{Mogul2004a,
+abstract = {Organizations use Web caches to avoid transferring the$\backslash$nsame data twice over the same path. Numerous studies$\backslash$nhave shown that forward proxy caches, in practice, incur$\backslash$nmiss rates of at least 50{\%}. Traditional Web caches rely on$\backslash$nthe reuse of responses for given URLs. Previous analyses$\backslash$nof real-world traces have revealed a complex relationship$\backslash$nbetween URLs and reply payloads, and have shown$\backslash$nthat this complexity frequently causes redundant transfers$\backslash$nto caches. For example, redundant transfers may...},
+author = {Mogul, Jeffery C and Chan, Yee Man and Kelly, Terence},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Mogul, Chan, Kelly - 2004 - Design, implementation, and evaluation of duplicate transfer detection in HTTP(2).pdf:pdf},
+journal = {Proceedings of USENIX NSDI},
+number = {March},
+pages = {4},
+title = {{Design, implementation, and evaluation of duplicate transfer detection in HTTP}},
+year = {2004}
+}
+@inproceedings{Anand2010Cheap,
+ title={Cheap and Large CAMs for High Performance Data-Intensive Networked Systems},
+ author={Anand, Ashok and Muthukrishnan, Chitra and Kappes, Steven and Akella, Aditya and Nath, Suman},
+ booktitle={Usenix Symposium on Networked Systems Design \& Implementation},
+ year={2010},
+}
+@article{Tang2017,
+author = {Tang, Pingping and Wang, Zaijian},
+file = {:C$\backslash$:/论文/yingship/Classification of Internet Video Traffic Using.pdf:pdf},
+isbn = {9781509065141},
+keywords = {classification,feature,multi-fractals,video traffic},
+number = {978},
+title = {{Classification of Internet Video Traffic Using}},
+year = {2017}
+}
+@article{Reed2017,
+abstract = {After more than a year of research and development, Netflix recently upgraded their infrastructure to provide HTTPS encryption of video streams in order to protect the privacy of their viewers. Despite this upgrade, we demonstrate that it is possible to accurately identify Netflix videos from passive traffic capture in real-time with very limited hardware requirements. Specifically, we developed a system that can report the Netflix video being delivered by a TCP connection using only the information provided by TCP/IP headers. To support our analysis, we created a fingerprint database comprised of 42,027 Netflix videos. Given this collection of fingerprints, we show that our system can differentiate between videos with greater than 99.99{\%} accuracy. Moreover, when tested against 200 random 20-minute video streams, our system identified 99.5{\%} of the videos with the majority of the identifications occurring less than two and a half minutes into the video stream.},
+author = {Reed, Andrew and Kranch, Michael},
+file = {:C$\backslash$:/论文/yingship/p361-reed.pdf:pdf},
+isbn = {9781450345231},
+journal = {Proceedings of the Seventh ACM on Conference on Data and Application Security and Privacy - CODASPY '17},
+keywords = {dynamic adaptive streaming over,http,privacy,traffic analysis},
+pages = {361--368},
+title = {{Identifying HTTPS-Protected Netflix Videos in Real-Time}},
+url = {http://dl.acm.org/citation.cfm?doid=3029806.3029821},
+year = {2017}
+}
+@article{Winter2013,
+abstract = {Fuzzy hashing provides the possibility to identify similar files based on their hash signatures, which is useful for forensic investigations. Current tools for fuzzy hashing, e. g. ssdeep, perform similarity search on fuzzy hashes by brute force. This is often too time-consuming for real cases. We solve this issue for ssdeep and even a larger class of fuzzy hashes, namely for piecewise hash signatures, by introducing a suitable indexing strategy. The strategy is based on n-grams contained in the piecewise hash signatures, and it allows for answering similarity queries very efficiently. The implementation of our solution is called F2S2. This tool reduces the time needed for typical investigations from many days to minutes. ?? 2013 Elsevier Ltd. All rights reserved.},
+author = {Winter, Christian and Schneider, Markus and Yannikos, York},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Winter, Schneider, Yannikos - 2013 - F2S2 Fast forensic similarity search through indexing piecewise hash signatures.pdf:pdf},
+isbn = {1742-2876},
+issn = {17422876},
+journal = {Digital Investigation},
+keywords = {Digital forensics,Indexing,Piecewise hashing,Similarity search,n-Gram,ssdeep},
+number = {4},
+pages = {361--371},
+publisher = {Elsevier Ltd},
+title = {{F2S2: Fast forensic similarity search through indexing piecewise hash signatures}},
+url = {http://dx.doi.org/10.1016/j.diin.2013.08.003},
+volume = {10},
+year = {2013}
+}
+@article{Song2011,
+abstract = {Abstract Near - duplicate video retrieval (NDVR) has recently attracted lots of research attention due to the exponential growth of online videos. It helps in many areas, such as copyright protection, video tagging, online video usage monitoring, etc. Most of existing ... $\backslash$n},
+author = {Song, Jingkuan and Yang, Y and Huang, Z},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Song, Yang, Huang - 2011 - Multiple feature hashing for real-time large scale near-duplicate video retrieval.pdf:pdf},
+isbn = {9781450306164},
+journal = {Proceedings of the 19th ACM International Conference on Multimedia},
+keywords = {hashing,large scale,near-duplicate,video retrieval},
+pages = {423--432},
+title = {{Multiple feature hashing for real-time large scale near-duplicate video retrieval}},
+url = {http://dl.acm.org/citation.cfm?id=2072354},
+year = {2011}
+}
+@article{Chen2016,
+abstract = {—We describe how malicious customers can attack the availability of Content Delivery Networks (CDNs) by creating forwarding loops inside one CDN or across multiple CDNs. Such forwarding loops cause one request to be processed repeatedly or even indefinitely, resulting in undesired resource consumption and potential Denial-of-Service attacks. To evaluate the practicality of such forwarding-loop attacks, we examined 16 popular CDN providers and found all of them are vulnerable to some form of such attacks. While some CDNs appear to be aware of this threat and have adopted specific forwarding-loop detection mechanisms, we discovered that they can all be bypassed with new attack tech-niques. Although conceptually simple, a comprehensive defense requires collaboration among all CDNs. Given that hurdle, we also discuss other mitigations that individual CDN can implement immediately. At a higher level, our work underscores the hazards that can arise when a networked system provides users with control over forwarding, particularly in a context that lacks a single point of administrative control.},
+author = {Chen, Jianjun and Weaver, Nicholas and Wan, Tao},
+file = {:C$\backslash$:/论文/network redundancy/Chen, Weaver, Wan - 2016 - Host of Troubles Multiple Host Ambiguities in HTTP Implementations(2).pdf:pdf},
+isbn = {9781450341394},
+issn = {15437221},
+journal = {Ccs},
+pages = {1516--1527},
+title = {{Host of Troubles: Multiple Host Ambiguities in HTTP Implementations}},
+year = {2016}
+}
+@article{Rimmer2017,
+abstract = {Several studies have shown that the network traffic that is generated by a visit to a website over Tor reveals information specific to the website through the timing and sizes of network packets. By capturing traffic traces between users and their Tor entry guard, a network eavesdropper can leverage this meta-data to reveal which website Tor users are visiting. The success of such attacks heavily depends on the particular set of traffic features that are used to construct the fingerprint. Typically, these features are manually engineered and, as such, any change introduced to the Tor network can render these carefully constructed features ineffective. In this paper, we show that an adversary can automate the feature engineering process, and thus automatically deanonymize Tor traffic by applying our novel method based on deep learning. We collect a dataset comprised of more than three million network traces, which is the largest dataset of web traffic ever used for website fingerprinting, and find that the performance achieved by our deep learning approaches is comparable to known methods which include various research efforts spanning over multiple years. The obtained success rate exceeds 96{\%} for a closed world of 100 websites and 94{\%} for our biggest closed world of 900 classes. In our open world evaluation, the most performant deep learning model is 2{\%} more accurate than the state-of-the-art attack. Furthermore, we show that the implicit features automatically learned by our approach are far more resilient to dynamic changes of web content over time. We conclude that the ability to automatically construct the most relevant traffic features and perform accurate traffic recognition makes our deep learning based approach an efficient, flexible and robust technique for website fingerprinting.},
+archivePrefix = {arXiv},
+arxivId = {1708.06376},
+author = {Rimmer, Vera and Preuveneers, Davy and Juarez, Marc and {Van Goethem}, Tom and Joosen, Wouter},
+eprint = {1708.06376},
+file = {:C$\backslash$:/论文/fingerprint/ndss2018{\_}03A-1{\_}Rimmer{\_}paper.pdf:pdf},
+isbn = {1-891562-49-5},
+title = {{Automated Website Fingerprinting through Deep Learning}},
+url = {http://arxiv.org/abs/1708.06376{\%}0Ahttp://dx.doi.org/10.14722/ndss.2018.23105},
+year = {2017}
+}
+@article{Wu,
+author = {Wu, Kun-lung and Yu, Philip S and Wolf, Joel L},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wu et al. - Unknown - Segment-Based Proxy Caching of Multimedia Streams.pdf:pdf},
+isbn = {1581133480},
+title = {{Segment-Based Proxy Caching of Multimedia Streams}}
+}
+@article{Zhang2017,
+abstract = {Video cameras are pervasively deployed for security and smart city scenarios, with millions of them in large cities worldwide. Achieving the potential of these cam- eras requires efficiently analyzing the live videos in real- time. We describe VideoStorm, a video analytics system that processes thousands of video analytics queries on live video streams over large clusters. Given the high costs of vision processing, resource management is cru- cial. We consider two key characteristics of video ana- lytics: resource-quality tradeoff with multi-dimensional configurations, and variety in quality and lag goals. VideoStorm's offline profiler generates query resource- quality profile, while its online scheduler allocates re- sources to queries to maximize performance on quality and lag, in contrast to the commonly used fair sharing of resources in clusters. Deployment on an Azure clus- ter of 101 machines shows improvement by as much as 80{\%} in quality of real-world queries and 7× better lag, processing video from operational traffic cameras.},
+author = {Zhang, Haoyu and Ananthanarayanan, Ganesh and Bodik, Peter and Philipose, Matthai and Bahl, Paramvir and Freedman, Michael J.},
+file = {:C$\backslash$:/论文/yingship/nsdi17-zhang.pdf:pdf},
+isbn = {978-1-931971-37-9},
+journal = {Proceedings of the 14th USENIX Conference on Networked Systems Design and Implementation},
+keywords = {Video Analytics, Big Data Analytics, Cloud Computi},
+pages = {377--392},
+title = {{Live video analytics at scale with approximation and delay-tolerance}},
+url = {https://dl.acm.org/citation.cfm?id=3154661},
+year = {2017}
+}
+@article{Kornblum2006,
+abstract = {Homologous files share identical sets of bits in the same order. Because such files are not completely identical, traditional techniques such as cryptographic hashing cannot be used to identify them. This paper introduces a new technique for constructing hash signatures by combining a number of traditional hashes whose boundaries are determined by the context of the input. These signatures can be used to identify modified versions of known files even if data has been inserted, modified, or deleted in the new files. The description of this method is followed by a brief analysis of its performance and some sample applications to computer forensics. ?? 2006 DFRWS.},
+author = {Kornblum, Jesse},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Kornblum - 2006 - Identifying almost identical files using context triggered piecewise hashing.pdf:pdf},
+isbn = {17422876},
+issn = {17422876},
+journal = {Digital Investigation},
+keywords = {Forensics,Memory analysis,Microsoft,Reverse engineering,Windows},
+number = {SUPPL.},
+pages = {91--97},
+title = {{Identifying almost identical files using context triggered piecewise hashing}},
+volume = {3},
+year = {2006}
+}
+@article{Wang2015,
+abstract = {{\textcopyright} 2015 IEEE. String similarity search is a fundamental operation in data cleaning and integration. It has two variants, threshold-based string similarity search and top-k string similarity search. Existing algorithms are efficient either for the former or the latter; most of them can't support both two variants. To address this limitation, we propose a unified framework. We first recursively partition strings into disjoint segments and build a hierarchical segment tree index (HS-Tree) on top of the segments. Then we utilize the HS-Tree to support similarity search. For threshold-based search, we identify appropriate tree nodes based on the threshold to answer the query and devise an efficient algorithm (HS-Search). For top-k search, we identify promising strings with large possibility to be similar to the query, utilize these strings to estimate an upper bound which is used to prune dissimilar strings, and propose an algorithm (HS-Topk). We also develop effective pruning techniques to further improve the performance. Experimental results on real-world datasets show our method achieves high performance on the two problems and significantly outperforms state-of-the-art algorithms.},
+author = {Wang, Jin and Li, Guoliang and Deng, Dong and Zhang, Yong and Feng, Jianhua},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wang et al. - 2015 - Two birds with one stone An efficient hierarchical framework for top-k and threshold-based string similarity search.pdf:pdf},
+isbn = {9781479979639},
+issn = {10844627},
+journal = {Proceedings - International Conference on Data Engineering},
+keywords = {[Electronic Manuscript]},
+pages = {519--530},
+title = {{Two birds with one stone: An efficient hierarchical framework for top-k and threshold-based string similarity search}},
+volume = {2015-May},
+year = {2015}
+}
+@article{Chung2018,
+author = {Chung, Jae Won and Claypool, Mark},
+file = {:C$\backslash$:/论文/yingship/Silhouette - Identifying YouTube Video Flows from Encrypted Traffic.pdf:pdf},
+isbn = {9781450357722},
+journal = {Nossdav 2018},
+keywords = {2018,acm reference format,and mark claypool,feng li,http adaptive streaming,identifying,jae won chung,quic,service classi cation,silhouette,youtube},
+title = {{Silhouette – Identifying YouTube Video Flows from Encrypted Traffic}},
+year = {2018}
+}
+@article{Reed2016,
+author = {Reed, Andrew and Klimkowski, Benjamin},
+file = {:C$\backslash$:/论文/yingship/CCNC2016{\_}Reed{\_}Klimkowski{\_}Identifying{\_}VBR{\_}DASH.pdf:pdf},
+keywords = {dynamic adaptive streaming over,http,privacy,traffic analysis},
+pages = {1114--1119},
+title = {{Leaky Streams: Identifying Variable Bitrate DASH Videos Streamed over Encrypted 802.11n Connections}},
+year = {2016}
+}
+@article{Wei,
+author = {Wei, Jiansheng and Jiang, Hong and Zhou, Ke and Feng, Dan},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Wei et al. - Unknown - MAD2 A Scalable High-Throughput Exact Deduplication Approach for Network Backup Services.pdf:pdf},
+isbn = {9781424471539},
+title = {{MAD2: A Scalable High-Throughput Exact Deduplication Approach for Network Backup Services}}
+}
+@article{Tan2018,
+author = {Tan, Guolin and Zhang, Peng and Liu, Qingyun and Liu, Xinran and Zhu, Chunge and Dou, Fenghu},
+file = {:C$\backslash$:/论文/恶意url检测/08455975.pdf:pdf},
+isbn = {9781538643877},
+journal = {Proceedings - 17th IEEE International Conference on Trust, Security and Privacy in Computing and Communications and 12th IEEE International Conference on Big Data Science and Engineering, Trustcom/BigDataSE 2018},
+keywords = {URL,adaptive learning,concept drift,detection,malicious,traffic},
+pages = {737--743},
+publisher = {IEEE},
+title = {{Adaptive Malicious URL Detection: Learning in the Presence of Concept Drifts}},
+year = {2018}
+}
+@article{Zhang2018,
+abstract = {The emerging class of wide-area streaming analytics faces the challenge of scarce and variable WAN bandwidth. Non-adaptive applications built with TCP or UDP suffer from increased latency or degraded accuracy. State-of-the-art approaches that adapt to network changes require developer writing sub-optimal manual policies or are limited to application-specific optimizations.
+
+We present AWStream, a stream processing system that simultaneously achieves low latency and high accuracy in the wide area, requiring minimal developer efforts. To realize this, AWStream uses three ideas: (i) it integrates application adaptation as a first-class programming abstraction in the stream processing model; (ii) with a combination of offline and online profiling, it automatically learns an accurate profile that models accuracy and bandwidth trade-off; and (iii) at runtime, it carefully adjusts the application data rate to match the available bandwidth while maximizing the achievable accuracy. We evaluate AWStream with three real-world applications: augmented reality, pedestrian detection, and monitoring log analysis. Our experiments show that AWStream achieves sub-second latency with only nominal accuracy drop (2-6{\%}).},
+author = {Zhang, Ben and Jin, Xin and Ratnasamy, Sylvia and Wawrzynek, John and Lee, Edward A.},
+file = {:C$\backslash$:/论文/yingship/p236-zhang.pdf:pdf},
+isbn = {9781450355674},
+journal = {Proceedings of the 2018 Conference of the ACM Special Interest Group on Data Communication - SIGCOMM '18},
+keywords = {acm reference format,adaptation,and ed-,ben zhang,john wawrzynek,learning,profiling,sylvia ratnasamy,wide area network,xin jin},
+pages = {236--252},
+title = {{AWStream: Adaptive Wide-Area Streaming Analytics}},
+url = {http://dl.acm.org/citation.cfm?doid=3230543.3230554},
+year = {2018}
+}
+@article{Shih2004,
+abstract = {We propose new features and algorithms for automating Web-page classification$\backslash$ntasks such as content recommendation and ad blocking. We show that$\backslash$nthe automated classification of Web pages can be much improved if,$\backslash$ninstead of looking at their textual content, we consider each links's$\backslash$nURL and the visual placement of those links on a referring page.$\backslash$nThese features are unusual: rather than being scalar measurements$\backslash$nlike word counts they are tree structured--describing the position$\backslash$nof the item in a tree. We develop a model and algorithm for machine$\backslash$nlearning using such tree-structured features. We apply our methods$\backslash$nin automated tools for recognizing and blocking Web advertisements$\backslash$nand for recommending ``interesting'' news stories to a reader. Experiments$\backslash$nshow that our algorithms are both faster and more accurate than those$\backslash$nbased on the text content of Web documents.},
+author = {Shih, L. K. and Karger, D. R.},
+file = {:C$\backslash$:/论文/feature selection/1p193.pdf:pdf},
+isbn = {158113844X},
+journal = {Proceedings of the 13th conference on World Wide Web - WWW '04},
+keywords = {classification,news recommendation,tree structures,web ap-},
+pages = {193},
+title = {{Using urls and table layout for web classification tasks}},
+url = {http://portal.acm.org/citation.cfm?doid=988672.988699},
+year = {2004}
+}
+@article{Li2014,
+abstract = {In this paper, we introduce Facet, an unobservable transport service for social video sites. Facet evades detection by Internet censors by streaming social videos over Skype calls, and applying a novel traffic-analysis countermeasure called video morphing. We report on the performance and security of a prototype implementation of Facet and find that a single Facet server can support roughly 20 si-multaneous sessions, while providing strong unobservability: using the best known traffic analysis methods, a censor seeking to block 90{\%} of Facet calls would need to block over 40{\%} of all Skype calls. An additional benefit of our prototype implementation is that it avoids the distribution problem: clients can use Facet without installing any additional software.},
+author = {Li, Shuai and Schliep, Mike and Hopper, Nick},
+file = {:C$\backslash$:/论文/yingship/wpes14-facet.pdf:pdf},
+isbn = {9781450331487},
+issn = {978-1-4503-3148-7},
+journal = {Wpes},
+keywords = {censorship resistance,traffic analysis,video conferencing},
+pages = {163--172},
+title = {{Facet: Streaming over Videoconferencing for Censorship Circumvention.}},
+url = {http://dl.acm.org/citation.cfm?doid=2665943.2665944{\%}0Ahttp://doi.acm.org/10.1145/2665943.2665944{\%}5Cnpapers3://publication/doi/10.1145/2665943.2665944},
+year = {2014}
+}
+@article{Chen2016a,
+abstract = {At the present time, billions of videos are hosted and shared in the cloud of which a sizable portion consists of near-duplicate video copies. An efficient and accurate content-based online near-duplicate video detection method is a fundamental research goal; as it would benefit applications such as duplication-aware storage, pirate video detection, polluted video tag detection, searching result diversification. Despite the recent progress made in near-duplicate video detection, it remains challenging to develop a practical detection system for large-scale applications that has good efficiency and accuracy performance. In this paper, we shift the focus from feature representation design to system design, and develop a novel system, called CompoundEyes, accordingly. The improvement in accuracy is achieved via well-organized classifiers instead of advanced feature design. Meanwhile, by applying simple features with reduced dimensionality and exploiting the parallelism of the detection architecture, we accelerate the detection speed. Through extensive experiments we demonstrate that the proposed detection system is accurate and fast. It takes approximately 1.45 seconds to process a video clip from a large video dataset, CC{\_}WEB{\_}VIDEO, with a 89{\%} detection accuracy.},
+author = {Chen, Yixin and He, Wenbo and Hua, Yu and Wang, Wen},
+file = {:C$\backslash$:/Users/80456/AppData/Local/Mendeley Ltd./Mendeley Desktop/Downloaded/Chen et al. - 2016 - CompoundEyes Near-duplicate detection in large scale online video systems in the cloud.pdf:pdf},
+isbn = {9781467399531},
+issn = {0743166X},
+journal = {Proceedings - IEEE INFOCOM},
+title = {{CompoundEyes: Near-duplicate detection in large scale online video systems in the cloud}},
+volume = {2016-July},
+year = {2016}
+}
+@article{Vastel2018a,
+author = {Vastel, Antoine and Laperdrix, Pierre and Rudametkin, Walter and Rouvoy, Romain},
+file = {:C$\backslash$:/论文/yingship/sec18-vastel.pdf:pdf},
+isbn = {9781931971461},
+journal = {USENIX Security},
+title = {{Fp-Scanner: The Privacy Implications of Browser Fingerprint Inconsistencies}},
+year = {2018}
+}
+@article{Li2017a,
+abstract = {Feature selection, as a data preprocessing strategy, has been proven to be effective and efficient in prepar-ing data (especially high-dimensional data) for various data mining and machine learning problems. The objectives of feature selection include: building simpler and more comprehensible models, improving data mining performance, and preparing clean, understandable data. The recent proliferation of big data has presented some substantial challenges and opportunities to feature selection. In this survey, we provide a comprehensive and structured overview of recent advances in feature selection research. Motivated by cur-rent challenges and opportunities in the era of big data, we revisit feature selection research from a data perspective and review representative feature selection algorithms for conventional data, structured data, heterogeneous data and streaming data. Methodologically, to emphasize the differences and similarities of most existing feature selection algorithms for conventional data, we categorize them into four main groups: similarity based, information theoretical based, sparse learning based and statistical based methods. To facilitate and promote the research in this community, we also present an open-source feature selection repository that consists of most of the popular feature selection algorithms (http://featureselection.asu.edu/). Also, we use it as an example to show how to evaluate feature selection algorithms. At the end of the survey, we present a discussion about some open problems and challenges that require more attention in future research.},
+archivePrefix = {arXiv},
+arxivId = {1601.07996},
+author = {Li, Jundong and Cheng, Kewei and Wang, Suhang and Morstatter, Fred and Trevino, Robert P and Tang, Jiliang and Liu, Huan},
+eprint = {1601.07996},
+file = {:C$\backslash$:/论文/feature selection/Feature Selection A Data Perspective.pdf:pdf},
+issn = {03600300},
+journal = {ACM Computing Surveys},
+number = {6},
+pages = {1--45},
+title = {{Feature Selection: A Data Perspective}},
+volume = {50},
+year = {2017}
+}
+@article{Ma2009,
+abstract = {Malicious Web sites are a cornerstone of Internet criminal activities. As a result, there has been broad interest in developing systems to prevent the end user from visiting such sites. In this paper, we describe an approach to this problem based on automated URL classification, using statistical methods to discover the tell-tale lexical and host-based properties of malicious Web site URLs. These methods are able to learn highly predictive models by extracting and automatically analyzing tens of thousands of features potentially indicative of suspicious URLs. The resulting classifiers obtain 95-99{\%} accuracy, detecting large numbers of malicious Web sites from their URLs, with only modest false positives.},
+author = {Ma, Justin and Saul, Lawrence K and Savage, Stefan and Voelker, Geoffrey M},
+file = {:C$\backslash$:/论文/feature selection/mal-url-kdd09(4).pdf:pdf},
+isbn = {9781605584959},
+journal = {World Wide Web Internet And Web Information Systems},
+keywords = {l1 regularization,malicious web sites,supervised learning},
+pages = {1245--1253},
+title = {{Beyond Blacklists: Learning to Detect Malicious Web Sites from Suspicious URLs}},
+url = {http://portal.acm.org/citation.cfm?id=1557153},
+year = {2009}
+}
+@article{Cao2017,
+abstract = {—In this paper, we propose a browser fingerprinting technique that can track users not only within a single browser but also across different browsers on the same machine. Specif-ically, our approach utilizes many novel OS and hardware level features, such as those from graphics cards, CPU, and installed writing scripts. We extract these features by asking browsers to perform tasks that rely on corresponding OS and hardware functionalities. Our evaluation shows that our approach can successfully identify 99.24{\%} of users as opposed to 90.84{\%} for state of the art on single-browser fingerprinting against the same dataset. Further, our approach can achieve higher uniqueness rate than the only cross-browser approach in the literature with similar stability.},
+author = {Cao, Yinzhi and Li, Song and Wijmans, Erik},
+file = {:C$\backslash$:/论文/fingerprint/ndss2017{\_}02B-3{\_}Cao{\_}paper.pdf:pdf},
+isbn = {1891562460},
+journal = {Proceedings of Network {\&} Distributed System Security Symposium (NDSS)},
+number = {March},
+title = {{(Cross-)Browser Fingerprinting via OS and Hardware Level Features}},
+year = {2017}
+}
diff --git a/Biblio/ref.bib b/Biblio/ref.bib
new file mode 100644
index 0000000..03e40e2
--- /dev/null
+++ b/Biblio/ref.bib
@@ -0,0 +1,165 @@
+%---------------------------------------------------------------------------%
+%- -%
+%- Bibliography -%
+%- -%
+%---------------------------------------------------------------------------%
+@book{wikibook2014latex,
+ title={http://en.wikibooks.org/wiki/LaTeX},
+ author={Wikibook},
+ year={2014},
+ publisher={On-line Resources}
+}
+@book{lamport1986document,
+ title={Document Preparation System},
+ author={Lamport, Leslie},
+ year={1986},
+ publisher={Addison-Wesley Reading, MA}
+}
+@article{chen2005zhulu,
+ title={著录文后参考文献的规则及注意事项},
+ author={陈浩元},
+ key={Chen Hao Yuan},
+ journal={编辑学报},
+ volume={17},
+ number={6},
+ pages={413--415},
+ year={2005}
+}
+@book{chu2004tushu,
+ title={图书馆数字参考咨询服务研究},
+ author={初景利},
+ key={Chu Jing Li},
+ year={2004},
+ address={北京},
+ publisher={北京图书馆出版社}
+}
+@article{stamerjohanns2009mathml,
+ title={{MathML}-aware article conversion from {LaTeX}},
+ author={Stamerjohanns, Heinrich and Ginev, Deyan and David, Catalin and Misev, Dimitar and Zamdzhiev, Vladimir and Kohlhase, Michael},
+ journal={Towards a Digital Mathematics Library},
+ volume={16},
+ number={2},
+ pages={109--120},
+ year={2009},
+ publisher={Masaryk University Press}
+}
+@article{betts2005aging,
+ title={Aging reduces center-surround antagonism in visual motion processing},
+ author={Betts, Lisa R and Taylor, Christopher P},
+ journal={Neuron},
+ volume={45},
+ number={3},
+ pages={361--366},
+ year={2005},
+ publisher={Elsevier}
+}
+
+@article{bravo1990comparative,
+ title={Comparative study of visual inter and intrahemispheric cortico-cortical connections in five native Chilean rodents},
+ author={Bravo, Hermes and Olavarria, Jaime},
+ journal={Anatomy and embryology},
+ volume={181},
+ number={1},
+ pages={67--73},
+ year={1990},
+ publisher={Springer}
+}
+@book{hls2012jinji,
+ author = {哈里森·沃尔德伦},
+ key = {Haliseng Woerdelun},
+ translator = {谢远涛},
+ title = {经济数学与金融数学},
+ address = {北京},
+ publisher = {中国人民大学出版社},
+ year = {2012},
+ pages = {235--236},
+}
+@proceedings{niu2013zonghe,
+ editor = {牛志明 and 斯温兰德 and 雷光春},
+ key = {Niu Zhi Ming Siwenlande Lei Guang Chun},
+ title = {综合湿地管理国际研讨会论文集},
+ address = {北京},
+ publisher = {海洋出版社},
+ year = {2013},
+}
+@incollection{chen1980zhongguo,
+ author = {陈晋镳 and 张惠民 and 朱士兴 and 赵震 and
+ 王振刚},
+ key = {Chen Jing Ao Zhang Hui Ming Zhu Shi Xing Zhao Zhen Wang Zhen Gang},
+ title = {蓟县震旦亚界研究},
+ editor = {中国地质科学院天津地质矿产研究所},
+ booktitle = {中国震旦亚界},
+ address = {天津},
+ publisher = {天津科学技术出版社},
+ year = {1980},
+ pages = {56--114},
+}
+@article{yuan2012lana,
+ author = {袁训来 and 陈哲 and 肖书海},
+ key = {Yuan xun lai Chen zhe Xiao shu Hai},
+ title = {蓝田生物群: 一个认识多细胞生物起源和早期演化的新窗口 -- 篇一},
+ journal = {科学通报},
+ year = {2012},
+ volume = {57},
+ number = {34},
+ pages = {3219},
+}
+@article{yuan2012lanb,
+ author = {袁训来 and 陈哲 and 肖书海},
+ key = {Yuan xun lai Chen zhe Xiao shu Hai},
+ title = {蓝田生物群: 一个认识多细胞生物起源和早期演化的新窗口 -- 篇二},
+ journal = {科学通报},
+ year = {2012},
+ volume = {57},
+ number = {34},
+ pages = {3219},
+}
+@article{yuan2012lanc,
+ author = {袁训来 and 陈哲 and 肖书海},
+ key = {Yuan xun lai Chen zhe Xiao shu Hai},
+ title = {蓝田生物群: 一个认识多细胞生物起源和早期演化的新窗口 -- 篇三},
+ journal = {科学通报},
+ year = {2012},
+ volume = {57},
+ number = {34},
+ pages = {3219},
+}
+@article{walls2013drought,
+ author = {Walls, Susan C. and Barichivich, William J. and Brown, Mary
+ E.},
+ title = {Drought, deluge and declines: the impact of precipitation
+ extremes on amphibians in a changing climate},
+ journal = {Biology},
+ year = {2013},
+ volume = {2},
+ number = {1},
+ pages = {399--418},
+ urldate = {2013-11-04},
+ url = {http://www.mdpi.com/2079-7737/2/1/399},
+ doi = {10.3390/biology2010399},
+}
+@article{Bohan1928,
+ author = { ボハン, デ},
+ title = { 過去及び現在に於ける英国と会 },
+ journal = { 日本時報 },
+ year = { 1928 },
+ volume = { 17 },
+ pages = { 5-9 },
+ edition = { 9 },
+ hyphenation = { japanese },
+ language = { japanese }
+}
+
+@article{Dubrovin1906,
+ author = { Дубровин, А. И },
+ title = { Открытое письмо Председателя Главного Совета Союза Русского Народа Санкт-Петербургскому Антонию, Первенствующему члену Священного Синода },
+ journal = { Вече },
+ year = { 1906 },
+ volume = { },
+ edition = { 97 },
+ month = { 7 дек. 1906 },
+ pages = { 1-3 },
+ hyphenation = { russian },
+ language = { russian }
+}
+%---------------------------------------------------------------------------%
diff --git a/Img/80sizedistribution.png b/Img/80sizedistribution.png
new file mode 100644
index 0000000..93de2f1
--- /dev/null
+++ b/Img/80sizedistribution.png
Binary files differ
diff --git a/Img/Allaudio.png b/Img/Allaudio.png
new file mode 100644
index 0000000..b225b47
--- /dev/null
+++ b/Img/Allaudio.png
Binary files differ
diff --git a/Img/Content-based.png b/Img/Content-based.png
new file mode 100644
index 0000000..135d4a6
--- /dev/null
+++ b/Img/Content-based.png
Binary files differ
diff --git a/Img/Flowchartofpredictiveprogram.png b/Img/Flowchartofpredictiveprogram.png
new file mode 100644
index 0000000..73afa00
--- /dev/null
+++ b/Img/Flowchartofpredictiveprogram.png
Binary files differ
diff --git a/Img/Flowchartofrequest_ag.png b/Img/Flowchartofrequest_ag.png
new file mode 100644
index 0000000..bcd40ad
--- /dev/null
+++ b/Img/Flowchartofrequest_ag.png
Binary files differ
diff --git a/Img/Flowchartofrequest_fi.png b/Img/Flowchartofrequest_fi.png
new file mode 100644
index 0000000..a207c9d
--- /dev/null
+++ b/Img/Flowchartofrequest_fi.png
Binary files differ
diff --git a/Img/Frameworkforfeature.jpg b/Img/Frameworkforfeature.jpg
new file mode 100644
index 0000000..99dc2a5
--- /dev/null
+++ b/Img/Frameworkforfeature.jpg
Binary files differ
diff --git a/Img/Mobiledevice1.png b/Img/Mobiledevice1.png
new file mode 100644
index 0000000..aa238c7
--- /dev/null
+++ b/Img/Mobiledevice1.png
Binary files differ
diff --git a/Img/Mobiledevice2.png b/Img/Mobiledevice2.png
new file mode 100644
index 0000000..b762b17
--- /dev/null
+++ b/Img/Mobiledevice2.png
Binary files differ
diff --git a/Img/Multi-threaded.jpg b/Img/Multi-threaded.jpg
new file mode 100644
index 0000000..434de75
--- /dev/null
+++ b/Img/Multi-threaded.jpg
Binary files differ
diff --git a/Img/PACKalgorithm_re.jpg b/Img/PACKalgorithm_re.jpg
new file mode 100644
index 0000000..70907b0
--- /dev/null
+++ b/Img/PACKalgorithm_re.jpg
Binary files differ
diff --git a/Img/PACKalgorithm_se.jpg b/Img/PACKalgorithm_se.jpg
new file mode 100644
index 0000000..ca8b59d
--- /dev/null
+++ b/Img/PACKalgorithm_se.jpg
Binary files differ
diff --git a/Img/PACKstructure.jpg b/Img/PACKstructure.jpg
new file mode 100644
index 0000000..500ead4
--- /dev/null
+++ b/Img/PACKstructure.jpg
Binary files differ
diff --git a/Img/SFH.jpg b/Img/SFH.jpg
new file mode 100644
index 0000000..5b386c6
--- /dev/null
+++ b/Img/SFH.jpg
Binary files differ
diff --git a/Img/Schematicdiagram.jpg b/Img/Schematicdiagram.jpg
new file mode 100644
index 0000000..2ffd88d
--- /dev/null
+++ b/Img/Schematicdiagram.jpg
Binary files differ
diff --git a/Img/Systemstructure.jpg b/Img/Systemstructure.jpg
new file mode 100644
index 0000000..0a245c6
--- /dev/null
+++ b/Img/Systemstructure.jpg
Binary files differ
diff --git a/Img/Typicaldecision.jpg b/Img/Typicaldecision.jpg
new file mode 100644
index 0000000..740bf7c
--- /dev/null
+++ b/Img/Typicaldecision.jpg
Binary files differ
diff --git a/Img/handleout-of-order.jpg b/Img/handleout-of-order.jpg
new file mode 100644
index 0000000..3c22d00
--- /dev/null
+++ b/Img/handleout-of-order.jpg
Binary files differ
diff --git a/Img/numberflow.png b/Img/numberflow.png
new file mode 100644
index 0000000..dc98f78
--- /dev/null
+++ b/Img/numberflow.png
Binary files differ
diff --git a/Img/oaspl_a.pdf b/Img/oaspl_a.pdf
new file mode 100644
index 0000000..0748b93
--- /dev/null
+++ b/Img/oaspl_a.pdf
Binary files differ
diff --git a/Img/oaspl_b.pdf b/Img/oaspl_b.pdf
new file mode 100644
index 0000000..5816b28
--- /dev/null
+++ b/Img/oaspl_b.pdf
Binary files differ
diff --git a/Img/oaspl_c.pdf b/Img/oaspl_c.pdf
new file mode 100644
index 0000000..ab8e213
--- /dev/null
+++ b/Img/oaspl_c.pdf
Binary files differ
diff --git a/Img/oaspl_d.pdf b/Img/oaspl_d.pdf
new file mode 100644
index 0000000..3a4b590
--- /dev/null
+++ b/Img/oaspl_d.pdf
Binary files differ
diff --git a/Img/result_a.png b/Img/result_a.png
new file mode 100644
index 0000000..1ad2b3e
--- /dev/null
+++ b/Img/result_a.png
Binary files differ
diff --git a/Img/result_b.png b/Img/result_b.png
new file mode 100644
index 0000000..9068b38
--- /dev/null
+++ b/Img/result_b.png
Binary files differ
diff --git a/Img/result_c.png b/Img/result_c.png
new file mode 100644
index 0000000..567cefe
--- /dev/null
+++ b/Img/result_c.png
Binary files differ
diff --git a/Img/result_d.png b/Img/result_d.png
new file mode 100644
index 0000000..81c041f
--- /dev/null
+++ b/Img/result_d.png
Binary files differ
diff --git a/Img/route.jpg b/Img/route.jpg
new file mode 100644
index 0000000..6cae047
--- /dev/null
+++ b/Img/route.jpg
Binary files differ
diff --git a/Img/shock_cyn.jpg b/Img/shock_cyn.jpg
new file mode 100644
index 0000000..214ad78
--- /dev/null
+++ b/Img/shock_cyn.jpg
Binary files differ
diff --git a/Img/systemframework.png b/Img/systemframework.png
new file mode 100644
index 0000000..06207b0
--- /dev/null
+++ b/Img/systemframework.png
Binary files differ
diff --git a/Img/tc_q_criteria.jpg b/Img/tc_q_criteria.jpg
new file mode 100644
index 0000000..72dbe76
--- /dev/null
+++ b/Img/tc_q_criteria.jpg
Binary files differ
diff --git a/Img/test.png b/Img/test.png
new file mode 100644
index 0000000..12f55ba
--- /dev/null
+++ b/Img/test.png
Binary files differ
diff --git a/Img/ucas_logo.pdf b/Img/ucas_logo.pdf
new file mode 100644
index 0000000..c3abbdb
--- /dev/null
+++ b/Img/ucas_logo.pdf
Binary files differ
diff --git a/Img/youtube.png b/Img/youtube.png
new file mode 100644
index 0000000..4287aa3
--- /dev/null
+++ b/Img/youtube.png
Binary files differ
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..433e266
--- /dev/null
+++ b/README.md
@@ -0,0 +1,55 @@
+# `ucasthesis` 国科大学位论文 LaTeX 模板 [最新样式]
+
+## 模板下载
+
+请在页面右边点击:**Clone or download -> Download Zip**
+
+## 重要建议
+
+* 关于 LaTeX 的知识性问题,请查阅 [ucasthesis 知识小站](https://github.com/mohuangrui/ucasthesis/wiki) 和 [LaTeX Wikibook](https://en.wikibooks.org/wiki/LaTeX)。
+* 关于模板编译和样式设计的问题,请先仔细阅读 **模板使用说明.pdf** 并遵从提问流程。
+* 使用邮件传播 ucasthesis 时,请先删除 `artratex.bat` 以防范 DOS 脚本的潜在风险。
+* 开题报告请见:[ucasproposal: 中国科学院大学开题报告 LaTeX 模板](https://github.com/mohuangrui/ucasproposal)。
+* 书脊制作请见:[latexspine: LaTeX 书脊模板](https://github.com/mohuangrui/latexspine)。
+
+## 模板简介
+
+* ucasthesis 为撰写中国科学院大学本、硕、博学位论文的 LaTeX 模版。ucasthesis 提供了简单明了的**模板使用说明.pdf**。无论你是否具有 LaTeX 使用经验,都可较为轻松地使用以完成学位论文的撰写和排版。谢谢大家的测试、反馈和支持,我们一起的努力让 ucasthesis 非常荣幸地得到了国科大本科部陆晴老师、本科部学位办丁云云老师和中科院数学与系统科学研究院吴凌云研究员的支持,并得到吴凌云学长在 [CTEX](http://www.ctex.org/HomePage) 的发布。
+
+* 考虑到许多同学可能缺乏 LaTeX 使用经验,ucasthesis 将 LaTeX 的复杂性高度封装,开放出简单的接口,以便轻易使用。同时,对用 LaTeX 撰写论文的一些主要难题,如制图、制表、文献索引等,进行了详细说明,并提供了相应的代码样本,理解了上述问题后,对于初学者而言,使用此模板撰写学位论文将不存在实质性的困难。所以,如果你是初学者,请不要直接放弃,因为同样为初学者的我,十分明白让 LaTeX 简单易用的重要性,而这正是 ucasthesis 所追求和体现的。
+
+* 此中国科学院大学学位论文模板 ucasthesis 基于中科院数学与系统科学研究院吴凌云研究员的 CASthesis 模板发展而来。当前 ucasthesis 模板满足最新的中国科学院大学学位论文撰写要求和封面设定。兼顾操作系统:Windows,Linux,MacOS 和 LaTeX 编译引擎:pdflatex,xelatex,lualatex。支持中文书签、中文渲染、中文粗体显示、拷贝 PDF 中的文本到其他文本编辑器等特性。此外,对模板的文档结构进行了精心设计,撰写了编译脚本提高模板的易用性和使用效率。
+
+* ucasthesis 的目标在于简化学位论文的撰写,利用 LaTeX 格式与内容分离的特征,模板将格式设计好后,作者可只需关注论文内容。 同时,ucasthesis 有着整洁一致的代码结构和扼要的注解,对文档的仔细阅读可为初学者提供一个学习 LaTeX 的窗口。此外,模板的架构十分注重通用性,事实上,ucasthesis 不仅是国科大学位论文模板,同时,通过少量修改即可成为使用 LaTeX 撰写中英文文章或书籍的通用模板,并为使用者的个性化设定提供了接口。
+
+## 重要通知
+
+* `2019-03-12` 模板样式进行了修改,请查看下面的修改描述,以决定是否需要更新。
+
+## 更新记录
+
+* `2019-03-12` 根据 [opt-gaobin, issue #121](https://github.com/mohuangrui/ucasthesis/issues/121) ,修正中文标点使下划线断掉的问题。
+
+* `2019-02-20` 根据 [opt-gaobin, issue #100](https://github.com/mohuangrui/ucasthesis/issues/100) ,增加定理、定义、证明等数学环境。根据 [DRjy, issue #102](https://github.com/mohuangrui/ucasthesis/issues/102) ,调整 \mathcal 字体样式。根据 [zike Liu, email] ,适当缩减目录列表的缩进。根据 [xiaoyaoE, issue #105](https://github.com/mohuangrui/ucasthesis/issues/105) ,使数字字体和英文字体一致。完善中文版和国际版之间的中英格式切换。
+
+* `2019-01-10` 根据 [mnpengjk, issue #57](https://github.com/mohuangrui/ucasthesis/issues/57) , 将公式编号前加点纳入模版默认,更多讨论可见:[琐屑细节](https://github.com/mohuangrui/ucasthesis/wiki/琐屑细节) 。根据 [yunyun2019, issue #95](https://github.com/mohuangrui/ucasthesis/issues/95) ,采用 [zepinglee](https://github.com/zepinglee) 基于国标样式为 ucas 所定制文献样式:[ucas 样式分支](https://github.com/CTeX-org/gbt7714-bibtex-style/tree/ucas) ,文献样式更多讨论可见:[文献样式](https://github.com/mohuangrui/ucasthesis/wiki/%E6%96%87%E7%8C%AE%E6%A0%B7%E5%BC%8F)。根据 [邵岳林, email] ,将附录复原为常规的排版设置,若需将附录置于参考文献后,请见:[琐屑细节](https://github.com/mohuangrui/ucasthesis/wiki/琐屑细节)。
+
+* `2018-04-03` 根据国科大本科部陆晴老师和本科部学位办丁云云老师的复审审核建议再次修复一些样式细节问题。
+
+* `2018-04-02` 模板进行了重大更新,修复了样式、字体、格式等许多问题。
+
+ * 根据国科大本科部陆晴老师的建议对模版样式进行了诸多拓展和修正,并完善对本科生论文元素的兼容性。
+  * 在 [ctex](https://github.com/CTeX-org/ctex-kit) 开发者的帮助下解决了如何多次调用 Times New Roman 而不导致黑体调用错误的问题。根据 [twn1993, email],修复默认黑体为微软雅黑而不是 SimHei 的问题。
+ * 繁复折腾测试后终于找出一个在ctex默认黑体替换粗宋体设定环境内全局AutoFakeBold失效状态下折衷特定字体库不全条件下生僻字显示和系统默认字重不全条件下粗宋体显示以及不同操作系统下如何平衡上述字库自重矛盾还有根据操作系统自动调用所带有的Times字体的方案。祝贺大家不用再为字体调用苦恼了。
+ * 设定论文封面据英文学位名如 Bachelor,Master,或 Doctor 自动切换。密级据是否填写自动显示。
+
+* `2018-03-22` 演示表标题居表上,加粗图表标注,设置长图表标题悬挂缩进(由于 bicaption 宏包无法正确接受 caption 宏包的 margin 选项,图表中英标题第一行无法正确同步缩进,从而放弃第一行的缩进),强调多图中子图标题的规范使用,通过摘要和符号列表演示标题不在目录中显示却仍在页眉中显示。根据 [赵永明, email],设置双语图表标题和 bicaption 不在图形列表和表格列表中显示英文标题。
+
+* `2018-03-21` 根据 [zhanglinbo, issue #42](https://github.com/mohuangrui/ucasthesis/issues/42) ,使用 [xiaoyao9933](https://github.com/xiaoyao9933/UCASthesis) 制作的 ucas_logo.pdf 使学校 logo 放大不失真。根据 [Starsky Wong, issue #41](https://github.com/mohuangrui/ucasthesis/issues/41) ,设置标题英文设为 Times New Roman 。根据 [will0n, issue #29](https://github.com/mohuangrui/ucasthesis/issues/29) ,[Man-Ting-Fang, issue #26](https://github.com/mohuangrui/ucasthesis/issues/26) ,[diyiliaoya, issue #12](https://github.com/mohuangrui/ucasthesis/issues/12) ,和 [赵永明, email] ,矫正一些格式细节问题。根据 [tangjie1992, issue #30](https://github.com/mohuangrui/ucasthesis/issues/30) ,配置算法环境。
+
+* `2018-02-04` 在 [ctex](https://github.com/CTeX-org/ctex-kit) 开发者的帮助下修复误用字体命令导致的粗宋体异常。然后,将模板兼容性进一步扩展为兼容操作系统 Windows,Linux,MacOS 和 LaTeX 编译引擎 pdflatex,xelatex,lualatex。移除 microtype 宏包以提高编译效率。
+
+* `2018-01-28` 基于国科大 2018 新版论文规范进行了重大修改,采用新的封面、声明、页眉页脚样式。展示标题中使用数学公式。
+
+* `2017-05-14` 根据 [赵永明, email] ,增加 \citepns{} 和 \citetns{} 命令提供上标引用下混合非上标引用的需求。根据 [臧光明, email] ,添加设定论文为 thesis 或 dissertation 的命令。
+
diff --git a/Style/artracom.sty b/Style/artracom.sty
new file mode 100644
index 0000000..ab6629d
--- /dev/null
+++ b/Style/artracom.sty
@@ -0,0 +1,94 @@
+%---------------------------------------------------------------------------%
+%- -%
+%- User-defined Commands -%
+%- -%
+%---------------------------------------------------------------------------%
+%- Copyright (C) Huangrui Mo <[email protected]>
+%- This is free software: you can redistribute it and/or modify it
+%- under the terms of the GNU General Public License as published by
+%- the Free Software Foundation, either version 3 of the License, or
+%- (at your option) any later version.
+%---------------------------------------------------------------------------%
+%->> Identification
+%---------------------------------------------------------------------------%
+\NeedsTeXFormat{LaTeX2e}%
+\ProvidesPackage{artracom}[2014/10/01 v0.1 LaTeX macros package]%
+%---------------------------------------------------------------------------%
+%->> Declare options
+%---------------------------------------------------------------------------%
+%-
+%-> Handle non-implemented options
+%-
+\DeclareOption*{%
+ \PackageWarning{artracom}{Unknown option '\CurrentOption'}%
+}
+%-
+%-> Terminates all options processing
+%-
+\ProcessOptions\relax%
+%---------------------------------------------------------------------------%
+%->> User defined commands
+%---------------------------------------------------------------------------%
+%-
+%-> General information
+%-
+%- \newcommand: defines a new command, makes an error if already defined
+%- \renewcommand: redefines a predefined command, makes an error if not yet defined
+%- \providecommand: defines a new command, if already defined, keep old definition
+%- *-forms: enable error check for missing arguments or arguments contain \par
+%- \def: define a command whether new or predefined, equivalent to
+%- \providecommand{name}{}%
+%- \renewcommand*{name}[number of arguments][default value]{definition}
+%- \long\def: define a command whether new or predefined, equivalent to
+%- \providecommand{name}{}%
+%- \renewcommand{name}[number of arguments][default value]{definition}
+%- *-forms that are usually the better form to use when defining commands with
+%- arguments, unless any of these arguments is intended to contain whole paragraphs
+%- of text. If you need to use the non-star form then you should ask whether that
+%- argument better be treated as the contents of a suitably defined environment.
+%-
+%-> Math functions
+%-
+%- International standard layout rules (from isomath package)
+%- The overall rule is that symbols representing math quantities or variables should
+%- be italicised, symbols representing units or labels are unitalicised (roman).
+%- Symbols for vectors and matrices are bold italic, symbols for tensors are
+%- sans-serif bold italic.
+%- The above rules apply equally to letter symbols from the Greek and
+%- the Latin alphabet.
+%- More information may be found in <<The LaTeX Mathematics Companion>>
+%- However, math typefaces vary from field to field. To keep consistent typography
+%- and easy adaptation, it is always best to create a corresponding command for
+%- variables in each math category.
+%\providecommand{\Scalar}[1]{#1}% scalar is the normal math typeset, italic
+\providecommand{\Vector}[1]{\boldsymbol{#1}}% general vectors in bold italic
+%\renewcommand{\vec}[1]{\Vector{#1}}% redefine predefined vector if needed
+\providecommand{\unitVector}[1]{\boldsymbol{\mathbf{#1}}}% unit vectors in bold roman
+\providecommand{\Tensor}[1]{\boldsymbol{\mathsf{#1}}}% tensor in sans-serif bold italic
+\providecommand{\unitTensor}[1]{\boldsymbol{{\mathsf{#1}}}}% identity tensor in sans-serif bold
+\providecommand{\Matrix}[1]{\boldsymbol{\mathbf{#1}}}% matrix in bold roman
+\providecommand{\unitMatrix}[1]{\boldsymbol{\mathbf{#1}}}% identity matrix in bold roman
+\providecommand{\Unit}[1]{\,\mathrm{#1}}% units in roman
+\providecommand{\Const}[1]{\mathrm{#1}}% math constants, functions
+\providecommand{\Set}[1]{\mathbb{#1}}% special sets in blackboard bold
+\providecommand{\Div}{\operatorname{div}}% divergence operator
+\providecommand{\Order}{\operatorname{O}}% order operator
+\providecommand{\Trace}{\operatorname{tr}}% trace operator
+\providecommand{\Diag}{\operatorname{diag}}% diagonal
+\providecommand{\Def}{\operatorname{def}}% define
+\providecommand{\Loptr}{\operatorname{\mathcal{L}}}% spatial operator
+\providecommand{\Toptr}{\operatorname{\mathcal{LL}}}% temporal operator
+\providecommand{\Soptr}{\operatorname{\mathcal{S}}}% solution operator
+\providecommand{\Des}[1]{\mathrm{#1}}% descriptive superscripts and subscripts in roman type
+%-
+%-> Graphical length factor
+%-
+\providecommand*{\MyFactor}{0.6}% for single figure
+\providecommand*{\MySubFactor}{0.45}% for subfigure
+%-
+%-> Add dots before equation number
+%-
+\newcommand{\adddotsbeforeeqnnum}{\def\maketag@@@##1{\hbox{\m@th\normalfont\dots\ ##1}}}%
+%---------------------------------------------------------------------------%
+\endinput
+
diff --git a/Style/artratex.sty b/Style/artratex.sty
new file mode 100644
index 0000000..8200796
--- /dev/null
+++ b/Style/artratex.sty
@@ -0,0 +1,660 @@
+%---------------------------------------------------------------------------%
+%- -%
+%- Document Style -%
+%- -%
+%---------------------------------------------------------------------------%
+%- Copyright (C) Huangrui Mo <[email protected]>
+%- This is free software: you can redistribute it and/or modify it
+%- under the terms of the GNU General Public License as published by
+%- the Free Software Foundation, either version 3 of the License, or
+%- (at your option) any later version.
+%---------------------------------------------------------------------------%
+%->> Identification
+%---------------------------------------------------------------------------%
+\NeedsTeXFormat{LaTeX2e}%
+\ProvidesPackage{artratex}[2014/10/01 v0.1 LaTeX macros package]%
+%---------------------------------------------------------------------------%
+%->> Declare options
+%---------------------------------------------------------------------------%
+%-
+%-> Platform fontset <windows>, <mac>, <adobe>, <times>, <others>
+%-
+\RequirePackage{expl3}% LaTeX3 programming environment
+\ExplSyntaxOn%
+\providecommand{\g__ctex_fontset_tl}{}% platform fontset state variable
+\edef\artxfontset{\g__ctex_fontset_tl}% expanded platform fontset state variable
+\ExplSyntaxOff%
+\newif\ifartx@windows \artx@windowsfalse
+\newif\ifartx@mac \artx@macfalse
+\newif\ifartx@adobe \artx@adobefalse
+\newif\ifartx@times \artx@timesfalse
+\newif\ifartx@others \artx@othersfalse
+\RequirePackage{etoolbox}% a toolbox of programming facilities
+\newcommand{\artxifstreq}{\expandafter\ifstrequal\expandafter}% expansion control
+\artxifstreq{\artxfontset}{windows}{\artx@windowstrue\artx@timestrue}{%
+\artxifstreq{\artxfontset}{mac}{\artx@mactrue\artx@timestrue}{%
+\artxifstreq{\artxfontset}{adobe}{\artx@adobetrue\artx@timestrue}{%
+\artx@otherstrue\artx@timesfalse}}}
+%-
+%-> LaTeX engine <pdflatex>, <lualatex>, <xelatex>
+%-
+\newif\ifartx@pdftex \artx@pdftexfalse
+\newif\ifartx@luatex \artx@luatexfalse
+\newif\ifartx@xetex \artx@xetexfalse
+\RequirePackage{ifxetex,ifluatex}% LaTeX engine detection
+\ifxetex%
+ \artx@xetextrue
+ \RequirePackage{xeCJK}% support calling system fonts
+\else\ifluatex%
+ \artx@luatextrue
+\else%
+ \artx@pdftextrue
+\fi\fi%
+%-
+%-> Bibliography engine <bibtex>, <biber>
+%-
+\newif\ifartx@bibtex \artx@bibtextrue
+\newif\ifartx@biber \artx@biberfalse
+\DeclareOption{bibtex}{%
+ \artx@bibtextrue
+ \artx@biberfalse
+}
+\DeclareOption{biber}{%
+ \artx@bibtexfalse
+ \artx@bibertrue
+}
+%-
+%-> Citation and reference style
+%-
+\newif\ifartx@numbers \artx@numberstrue
+\newif\ifartx@super \artx@superfalse
+\newif\ifartx@authoryear \artx@authoryearfalse
+\newif\ifartx@alpha \artx@alphafalse
+\DeclareOption{numbers}{%
+ \artx@numberstrue
+ \artx@superfalse
+ \artx@authoryearfalse
+ \artx@alphafalse
+}
+\DeclareOption{super}{%
+ \artx@numberstrue
+ \artx@supertrue
+ \artx@authoryearfalse
+ \artx@alphafalse
+}
+\DeclareOption{authoryear}{%
+ \artx@numbersfalse
+ \artx@superfalse
+ \artx@authoryeartrue
+ \artx@alphafalse
+}
+\DeclareOption{alpha}{%
+ \artx@numbersfalse
+ \artx@superfalse
+ \artx@authoryearfalse
+ \artx@alphatrue
+}
+%-
+%-> Page layout reconfiguration
+%-
+\newif\ifartx@geometry \artx@geometryfalse
+\DeclareOption{geometry}{%
+ \artx@geometrytrue
+}
+%-
+%-> Landscape layout support
+%-
+\newif\ifartx@lscape \artx@lscapefalse
+\DeclareOption{lscape}{%
+ \artx@lscapetrue
+}
+%-
+%-> Header and footer
+%-
+\newif\ifartx@myhdr \artx@myhdrfalse
+\DeclareOption{myhdr}{%
+ \artx@myhdrtrue
+}
+%-
+%-> Color support
+%-
+\newif\ifartx@color \artx@colorfalse
+\DeclareOption{color}{%
+ \artx@colortrue
+}
+%-
+%-> Page background
+%-
+\newif\ifartx@background \artx@backgroundfalse
+\DeclareOption{background}{%
+ \artx@colortrue
+ \artx@backgroundtrue
+}
+%-
+%-> Complex diagrams support
+%-
+\newif\ifartx@tikz \artx@tikzfalse
+\DeclareOption{tikz}{%
+ \artx@colortrue
+ \artx@tikztrue
+}
+%-
+%-> Complex tables support
+%-
+\newif\ifartx@table \artx@tablefalse
+\DeclareOption{table}{%
+ \artx@tabletrue
+}
+%-
+%-> Enhanced list
+%-
+\newif\ifartx@list \artx@listfalse
+\DeclareOption{list}{%
+ \artx@colortrue
+ \artx@listtrue
+}
+%-
+%-> Extra math support
+%-
+\newif\ifartx@math \artx@mathfalse
+\DeclareOption{math}{%
+ \artx@mathtrue
+}
+%-
+%-> Handle non-implemented options
+%-
+\DeclareOption*{%
+ \PackageWarning{artratex}{Unknown option '\CurrentOption'}%
+}
+%-
+%-> Terminates all options processing
+%-
+\ProcessOptions\relax%
+%---------------------------------------------------------------------------%
+%->> Detect class and counter
+%---------------------------------------------------------------------------%
+\newcommand*{\ifcounter}[1]{% check if counter exists
+ \ifcsname c@#1\endcsname%
+ \expandafter\@firstoftwo%
+ \else%
+ \expandafter\@secondoftwo%
+ \fi
+}
+\ifcounter{chapter}{\def\maincounter{chapter}}{\def\maincounter{section}}
+%---------------------------------------------------------------------------%
+%->> Required packages
+%---------------------------------------------------------------------------%
+%-
+%-> Math packages
+%-
+\RequirePackage{amsmath,amsthm,amssymb}% math structures, theorems, and extended symbols
+\theoremstyle{plain}% for theorems, lemmas, propositions, etc
+\providecommand{\theoremname}{Theorem}%
+\newtheorem{theorem}{\theoremname}[\maincounter]
+\providecommand{\axiomname}{Axiom}%
+\newtheorem{axiom}[theorem]{\axiomname}
+\providecommand{\lemmaname}{Lemma}%
+\newtheorem{lemma}[theorem]{\lemmaname}
+\providecommand{\corollaryname}{Corollary}%
+\newtheorem{corollary}[theorem]{\corollaryname}
+\providecommand{\assertionname}{Assertion}%
+\newtheorem{assertion}[theorem]{\assertionname}
+\providecommand{\propositionname}{Proposition}%
+\newtheorem{proposition}[theorem]{\propositionname}
+\providecommand{\conjecturename}{Conjecture}%
+\newtheorem{conjecture}[theorem]{\conjecturename}
+\theoremstyle{definition}% for definitions and examples
+\providecommand{\definitionname}{Definition}%
+\newtheorem{definition}{\definitionname}[\maincounter]
+\providecommand{\examplename}{Example}%
+\newtheorem{example}{\examplename}[\maincounter]
+\theoremstyle{remark}% for remarks and notes
+\providecommand{\remarkname}{Remark}%
+\newtheorem*{remark}{\remarkname}
+
+\ifartx@math% extra math packages
+ \RequirePackage{mathtools}% extension to amsmath
+\fi
+%-
+%-> Language settings
+%-
+\ifartx@pdftex% <pdflatex> call font packages
+ \RequirePackage[utf8]{inputenc}% set input encoding, document must use utf-8 encoding
+ \RequirePackage[T1]{fontenc}% set font encoding to enable modern font encoding
+ %- Text font: Chinese
+ %\RequirePackage{zhmCJK}% support calling system fonts
+ %\ifartx@windows%
+ % \setCJKmainfont[AutoFakeBold,ItalicFont=simkai.ttf]{simsun.ttc}%
+ % \setCJKsansfont[AutoFakeBold]{simhei.ttf}%
+ % \setCJKmonofont{simfang.ttf}%
+ %\fi
+ %- Text font: English <default: computer modern|others: font packages>
+ \RequirePackage{newtxtext}% main font, <times font: newtxtext|others: palatino>
+ %- Math font: <default: computer modern|others: font packages>
+ \RequirePackage[cmintegrals]{newtxmath}% times font, load after amsmath and newtxtext packages
+ \RequirePackage{mathrsfs}% enable \mathscr for script alphabet
+ \RequirePackage[cal=cm]{mathalfa}% map styles for calligraphic \mathcal and script \mathscr alphabet
+\else% <xelatex> or <lualatex> call system fonts
+ \RequirePackage{fontspec}% support calling system fonts
+ %- Font properties: <family> + <weight> + <shape> + <size>
+ %- Specify the three default TeX font families: <main>, <sans>, <mono>
+ %- \setxxxxfont{<font>}[BoldFont=<font-b>,ItalicFont=<font-i>]%
+ %- <font>: \mdseries\upshape; <font-b>: \bfseries\upshape; <font-i>: \mdseries\itshape
+ %- <\lfseries|\mdseries|\bfseries>: weight of font, default <\mdseries>
+ %- <\upshape|\itshape|\scshape>: shape of font, default <\upshape>
+ %- Roman or Serif - typefaces with strokes - for main content
+ %- examples: Times New Roman, Garamond, Adobe Garamond Pro
+ %\setmainfont{Garamond}[BoldFont=Garamond-Bold,ItalicFont=Garamond-Italic]%
+ %- font switches: {\rmfamily ...}, \textrm{...}, {\normalfont ...}, \textnormal{...}
+ %- the latter is due to \renewcommand*{\familydefault}{\rmdefault}
+ %- Sans serif - typefaces without strokes - for headings demanding high readability
+ %- examples: Arial, Helvetica, Gill Sans, Futura
+ %\setsansfont{Gill Sans MT}% font switches: {\sffamily ...}, \textsf{...}
+ %- Monospaced - typefaces with same width - for programming, etc
+ %- examples: Rockwell, Andale Mono, Courier
+ %\setmonofont{Rockwell}% font switches: {\ttfamily, ...}, \texttt{...}
+ %- Specify user-defined font families and font switches
+ %- without NFSSFamily option, a font family is defined only if adding a new font name
+ %\newfontfamily\<font-switch>{<font>}[<font features>]%
+ %\newcommand{\text<font-switch>}[1]{{\<font-switch> #1}}%
+ %- Text font: Chinese
+ \ifartx@windows%
+ \setCJKmainfont[AutoFakeBold,ItalicFont=KaiTi]{SimSun}%
+ \setCJKsansfont[AutoFakeBold]{SimHei}%
+ \setCJKmonofont{FangSong}%
+ \else\ifartx@mac%
+ \setCJKmainfont[ItalicFont=Kaiti SC,BoldItalicFont=Kaiti SC Bold]{Songti SC Light}%
+ \setCJKsansfont{Heiti SC}%
+ \setCJKmonofont{STFangsong}%
+ \else\ifartx@adobe%
+ \setCJKmainfont[AutoFakeBold,ItalicFont=AdobeKaitiStd-Regular]{AdobeSongStd-Light}%
+ \setCJKsansfont[AutoFakeBold]{AdobeHeitiStd-Regular}%
+ \setCJKmonofont{AdobeFangsongStd-Regular}%
+ \fi\fi\fi
+ %- Text font: English <default: computer modern|fontspec: system fonts|others: font packages>
+ \ifartx@times%
+ \setmainfont[NFSSFamily=entextrm]{Times New Roman}%
+ \setsansfont[NFSSFamily=entextsf]{Times New Roman}%
+ %\setmonofont[NFSSFamily=entexttt]{Courier New}%
+ \else
+ \setmainfont[NFSSFamily=entextrm]{FreeSerif}%
+ \setsansfont[NFSSFamily=entextsf]{FreeSerif}%
+ %\setmonofont[NFSSFamily=entexttt]{FreeMono}%
+ \fi
+ %\RequirePackage{newtxtext}% main font
+ %- Math font: <default: computer modern|others: font packages <newtxmath|unicode-math>>
+ \RequirePackage[cmintegrals]{newtxmath}% times font, load after amsmath and newtxtext packages
+ \DeclareSymbolFont{operators}{OT1}{ntxtlf}{m}{n}% fix numbers by setting operator font to newtx-family
+ \SetSymbolFont{operators}{bold}{OT1}{ntxtlf}{b}{n}% fix numbers by setting operator font to newtx-family
+ \AtBeginDocument{%
+ \DeclareMathSymbol{0}{\mathalpha}{operators}{`0}%
+ \DeclareMathSymbol{1}{\mathalpha}{operators}{`1}%
+ \DeclareMathSymbol{2}{\mathalpha}{operators}{`2}%
+ \DeclareMathSymbol{3}{\mathalpha}{operators}{`3}%
+ \DeclareMathSymbol{4}{\mathalpha}{operators}{`4}%
+ \DeclareMathSymbol{5}{\mathalpha}{operators}{`5}%
+ \DeclareMathSymbol{6}{\mathalpha}{operators}{`6}%
+ \DeclareMathSymbol{7}{\mathalpha}{operators}{`7}%
+ \DeclareMathSymbol{8}{\mathalpha}{operators}{`8}%
+ \DeclareMathSymbol{9}{\mathalpha}{operators}{`9}%
+ }
+ \RequirePackage{mathrsfs}% enable \mathscr for script alphabet
+ \RequirePackage[cal=cm]{mathalfa}% map styles for calligraphic \mathcal and script \mathscr alphabet
+ %\RequirePackage{unicode-math}% another math font configuration
+ %\setmathfont{XITS Math}% a complete symbol set for STIX math fonts
+ %- mathrm and mathsf can be set by \setmathrm and \setmathsf
+\fi
+%-
+%-> Bibliography processor and package
+%-
+%- BibTeX processor + natbib package (bst styles resolved under Biblio/)
+\ifartx@bibtex%
+    \ifartx@numbers% enable numbered citation style
+        \ifartx@super% enable superscripted citation style
+            \RequirePackage[square,comma,super,sort&compress]{natbib}% superscripted square bracket
+        \else
+            \RequirePackage[square,comma,numbers,sort&compress]{natbib}% square bracket
+        \fi
+        \bibliographystyle{Biblio/gbt7714-unsrt}% numbered scheme (GB/T 7714, unsorted)
+    \fi
+    \ifartx@authoryear% enable author year citation style
+        \RequirePackage{natbib}% author year citation mode
+        \bibliographystyle{Biblio/gbt7714-plain}% author year scheme
+    \fi
+    \ifartx@alpha% enable alpha citation style
+        \RequirePackage[square,comma,numbers]{natbib}% square bracket
+        \bibliographystyle{alpha}% alpha scheme
+    \fi
+    \providecommand*{\citetns}[2][]{% text embedded \citet in superscripted mode
+        \begingroup% scope the natbib internal overrides below to this one citation
+        \let\NAT@mbox=\mbox%
+        \let\@cite\NAT@citenum%
+        \let\NAT@space\NAT@spacechar%
+        \let\NAT@super@kern\relax% disable the superscript kern (natbib internal)
+        \renewcommand\NAT@open{[}%
+        \renewcommand\NAT@close{]}%
+        \citet[#1]{#2}%
+        \endgroup%
+    }
+    \providecommand*{\citepns}[2][]{% text embedded \citep in superscripted mode
+        \begingroup% scope the natbib internal overrides below to this one citation
+        \let\NAT@mbox=\mbox%
+        \let\@cite\NAT@citenum%
+        \let\NAT@space\NAT@spacechar%
+        \let\NAT@super@kern\relax% disable the superscript kern (natbib internal)
+        \renewcommand\NAT@open{[}%
+        \renewcommand\NAT@close{]}%
+        \citep[#1]{#2}%
+        \endgroup%
+    }
+\fi
+%- Biber processor + biblatex package
+\ifartx@biber%
+    \ifartx@numbers% enable numbered citation style
+        \ifartx@super% enable superscripted citation style
+            \RequirePackage[style=numeric-comp]{biblatex}% NOTE(review): identical to the \else branch; superscripted citations likely need extra biblatex options -- confirm
+        \else
+            \RequirePackage[style=numeric-comp]{biblatex}%
+        \fi
+    \fi
+    \ifartx@authoryear% enable author year citation style
+        \RequirePackage[style=authoryear]{biblatex}%
+    \fi
+    \ifartx@alpha% enable alpha citation style
+        \RequirePackage[style=alphabetic]{biblatex}%
+    \fi
+    \addbibresource{ref.bib}% NOTE(review): the repo stores the database at Biblio/ref.bib -- verify this path resolves
+\fi
+%-
+%-> Figure environment support
+%-
+\RequirePackage{graphicx}% packages for including graphics
+\RequirePackage[font={small,bf},skip=8pt,labelsep=space]{caption}% options: [margin=10pt,labelfont=bf]
+\RequirePackage{subcaption}% package for subfigures
+\RequirePackage[list=off]{bicaption}% package for binary (two-language) captions
+\captionsetup[figure][bi-first]{format=hang,hangindent=-0.5em}% first-language figure caption (bicaption)
+\captionsetup[figure][bi-second]{format=hang,hangindent=-2em,name=Figure}% second-language figure caption
+\captionsetup[table][bi-first]{format=hang,hangindent=-0.5em}% first-language table caption
+\captionsetup[table][bi-second]{format=hang,hangindent=-2em,name=Table}% second-language table caption
+\RequirePackage[section]{placeins}% prevent floats from being moved over section
+%-
+%-> Page layout and spacing
+%-
+\ifartx@geometry% enable geometry to redefine page layout
+    \RequirePackage{geometry}% page layout
+    %\RequirePackage{setspace}% line spacing
+\fi
+\ifartx@lscape% landscape layout
+    \RequirePackage{fancyhdr}% fancy headers and footers
+    %- usage: \begin{landscape} [\thispagestyle{lscape}] text... \end{landscape}
+    \RequirePackage{pdflscape}% landscape environment
+    \RequirePackage[absolute]{textpos}% rotated page number
+\fi
+\ifartx@myhdr% header and footer style
+    \RequirePackage{fancyhdr}% fancy headers and footers
+\fi
+%\RequirePackage{microtype}% improves general appearance of the text
+%-
+%-> Color
+%-
+\ifartx@color% enable color package to use color
+    %\RequirePackage{color}%
+    \RequirePackage[usenames,dvipsnames,table]{xcolor}%
+\fi
+%-
+%-> Draw graphics directly with TeX commands
+%-
+\ifartx@tikz%
+    \RequirePackage{tikz}% automatically load pgf package
+    \usetikzlibrary{% load libraries
+        positioning,
+        arrows,
+        calc,
+        trees
+    }%
+\fi
+%-
+%-> Complex tables
+%-
+\ifartx@table%
+    \RequirePackage{ctable}% imports the array, tabularx and booktabs packages
+\fi
+%-
+%-> List structures
+%-
+\ifartx@list% enable enhanced list and verbatim structures
+    \RequirePackage{verbatim}% improve verbatim environment
+    \RequirePackage{enumitem}% configure the enumerate environment
+    \setlist[enumerate]{wide=\parindent}% only indent the first line
+    \setlist[itemize]{wide=\parindent}% only indent the first line
+    \setlist{nosep}% default text spacing
+    \RequirePackage{listings}% source code
+    \RequirePackage{algpseudocode,algorithm,algorithmicx}% algorithm
+    \providecommand{\algname}{Algorithm}% fallback label; a document class may predefine \algname
+    \renewcommand*{\ALG@name}{\algname}% rename the algorithm float label to \algname
+\fi
+%-
+%-> Links support
+%-
+\RequirePackage{hyperref}% hyperlinks (conventionally loaded near the end of the preamble)
+\hypersetup{% set hyperlinks
+    %bookmarks=true,% show bookmarks bar
+    pdfencoding=auto,% allows non-Latin based languages in bookmarks
+    %pdftitle={},% title
+    %pdfauthor={},% author
+    %pdfsubject={},% subject
+    %pdftoolbar=true,% show toolbar
+    %pdfmenubar=true,% show menu
+    pdffitwindow=false,% window fit to page when opened
+    pdfstartview={FitH},% fits the width of the page to the window
+    %pdfnewwindow=true,% links in new window
+    %backref=true,% do bibliographical back references
+    %pagebackref=true,% backreference by page number
+    colorlinks=true,% false: boxed links; true: colored links
+    linkcolor=black,% color of internal links
+    citecolor=blue,% color of links to bibliography
+    %filecolor=magenta,% color of file links
+    urlcolor=red,% color of external links
+    bookmarksnumbered=true,% put section numbers in bookmarks
+    %hidelinks% remove link color and border
+}
+%---------------------------------------------------------------------------%
+%->> Configuration command
+%---------------------------------------------------------------------------%
+%-
+%-> Extensions and directories for graphics
+%-
+%- Declare graphic extensions for automatic selection when including graphics
+%- via avoiding supplying graphic extensions in \includegraphics command,
+%- the source file can be more general and adaptive
+\ifartx@xetex%
+    \DeclareGraphicsExtensions{.pdf,.png,.jpg,.eps,.tif,.bmp,.gif}%
+\else% <pdflatex> or <lualatex>
+    \DeclareGraphicsExtensions{.pdf,.png,.jpg}%
+\fi
+\graphicspath{{Img/}}% search path for figures
+%-
+%-> Layout, space, and style
+%-
+\ifartx@geometry% enable geometry to redefine page layout
+    \geometry{paper=a4paper,left=31.7mm,right=31.7mm,top=25.4mm,bottom=25.4mm}%
+\fi
+%\linespread{1.5}% 1.5 for "one and a half" line spacing, and 2.0 for "double" line spacing
+%\setlength{\parskip}{0.5ex plus 0.25ex minus 0.25ex}% skip space a paragraph
+\setcounter{tocdepth}{2}% depth for the table of contents
+\setcounter{secnumdepth}{3}% depth for section numbering, default is 2(subsub)
+%- Set equation, figure, table numbering
+%\numberwithin{equation}{section}% set enumeration level
+%\renewcommand{\theequation}{\thesection\arabic{equation}}% configure the label style
+%\numberwithin{figure}{section}% set enumeration level
+%\renewcommand{\thefigure}{\thesection\arabic{figure}}% configure the label style
+%\numberwithin{table}{section}% set enumeration level
+%\renewcommand{\thetable}{\thesection\arabic{table}}% configure the label style
+%- Set bibliography entry
+\ifartx@bibtex%
+    \setlength{\bibsep}{0.0ex plus 0.2ex minus 0.2ex}% set distance between entries (natbib length)
+\fi
+\ifartx@biber%
+    \setlength\bibitemsep{0.5\baselineskip}% set distance between entries (biblatex length)
+\fi
+\renewcommand*{\bibfont}{\small}% set font size for bibliography; NOTE(review): \bibfont must already exist (natbib/biblatex) or this \renewcommand errors
+%-
+%-> Nomenclature item
+%-
+\providecommand{\nomenclatureitem}[3][ ]{% args: [#1]=right-aligned note, #2=symbol (0.15\textwidth box), #3=description
+    \noindent\makebox[0.15\textwidth][l]{#2}{{#3}\hfill{#1}}\par
+}
+%-
+%-> Macro for adding content link to the table of content and bookmark
+%-
+\providecommand{\intotoc}[2][\maincounter]{% [#1]=toc entry level, #2=title text
+    \cleardoublepage% ensure correct page reference
+    \markboth{\MakeUppercase{#2}}{}% set the leftmark
+    \phantomsection% create link in bookmarks
+    \addcontentsline{toc}{#1}{#2}% add content #2 to toc as #1
+}
+%-
+%-> Page header and footer Style
+%-
+%- Page styles in Latex refers to headers and footers of a document.
+%- These headers/footers typically contain document titles, chapter
+%- or section numbers/names, and page numbers.
+%- Configure fancy style
+\ifartx@myhdr% user defined header and footer style
+    \pagestyle{fancy}%
+    \providecommand{\chaptermark}{}% compatibility for non-book classes
+    \providecommand{\thechapter}{}% compatibility for non-book classes
+    \providecommand{\CTEXthechapter}{\thechapter.}% compatibility for non ctex classes
+    %- reset style of chapter and section mark to actual name
+    \renewcommand{\chaptermark}[1]{\markboth{\MakeUppercase{#1}}{}}%
+    \renewcommand{\sectionmark}[1]{\markright{\MakeUppercase{#1}}{}}%
+    %- deactivate uppercase effect
+    \renewcommand{\MakeUppercase}[1]{#1}% identity; NOTE(review): global redefinition affects every use of \MakeUppercase, not just the marks above
+    %- Define different kinds of header and footer for different parts
+    \fancypagestyle{frontmatterstyle}{% style for frontmatter: centered page number in footer
+        \fancyhf{}% clear fields
+        \fancyhead[CE]{\footnotesize \@title}% structure elements
+        \fancyhead[CO]{\footnotesize \leftmark}% structure elements
+        \fancyfoot[CE]{\footnotesize \thepage}% page number
+        \fancyfoot[CO]{\footnotesize \thepage}% page number
+        \renewcommand{\headrulewidth}{0.8pt}% header rule
+        \renewcommand{\footrulewidth}{0pt}% footer rule
+    }
+    \fancypagestyle{mainmatterstyle}{% style for mainmatter: outer-edge page number in footer
+        \fancyhf{}% clear fields
+        \fancyhead[CE]{\footnotesize \@title}% structure elements
+        \fancyhead[CO]{\footnotesize \CTEXthechapter\ \leftmark}% structure elements
+        \fancyfoot[LE]{\footnotesize \thepage}% page number
+        \fancyfoot[RO]{\footnotesize \thepage}% page number
+        \renewcommand{\headrulewidth}{0.8pt}% header rule
+        \renewcommand{\footrulewidth}{0pt}% footer rule
+    }
+    \fancypagestyle{backmatterstyle}{% header and footer style for backmatter
+        \fancyhf{}% clear fields
+        \fancyhead[CE]{\footnotesize \@title}% structure elements
+        \fancyhead[CO]{\footnotesize \leftmark}% structure elements
+        \fancyfoot[LE]{\footnotesize \thepage}% page number
+        \fancyfoot[RO]{\footnotesize \thepage}% page number
+        \renewcommand{\headrulewidth}{0.8pt}% header rule
+        \renewcommand{\footrulewidth}{0pt}% footer rule
+    }
+    %- Redefine \frontmatter to include the change
+    \providecommand{\frontmatter}{}% compatibility for non-book classes
+    \let\myfrontmatter\frontmatter% save the original definition
+    \renewcommand{\frontmatter}{%
+        \myfrontmatter%
+        \pagestyle{frontmatterstyle}%
+    }
+    %- Redefine \mainmatter to include the change
+    \providecommand{\mainmatter}{}% compatibility for non-book classes
+    \let\mymainmatter\mainmatter% save the original definition
+    \renewcommand{\mainmatter}{%
+        \mymainmatter%
+        \pagestyle{mainmatterstyle}%
+    }
+    %- Redefine \backmatter to include the change
+    \providecommand{\backmatter}{}% compatibility for non-book classes
+    \let\mybackmatter\backmatter% save the original definition
+    \renewcommand{\backmatter}{%
+        \mybackmatter%
+        \pagestyle{backmatterstyle}%
+    }
+    %- Some Latex commands, like \chapter, use the \thispagestyle command
+    %- to automatically switch to the plain page style, thus ignoring the
+    %- page style currently in effect. To customize such pages you must
+    %- redefine the plain pagestyle. If you want the plain style inherits
+    %- the current style, comment all the lines in plain style definition.
+    \fancypagestyle{plain}{% empty body: plain pages inherit the current fancy style
+        %\fancyhf{}% clear fields
+        %\renewcommand{\headrulewidth}{0pt}% header rule
+        %\renewcommand{\footrulewidth}{0pt}% footer rule
+    }
+    \fancypagestyle{noheaderstyle}{% header and footer style for no header
+        \fancyhf{}% clear fields
+        %\fancyhead[CE]{\footnotesize \@title}% structure elements
+        %\fancyhead[CO]{\footnotesize \leftmark}% structure elements
+        \fancyfoot[LE]{\footnotesize \thepage}% page number
+        \fancyfoot[RO]{\footnotesize \thepage}% page number
+        \renewcommand{\headrulewidth}{0pt}% header rule
+        \renewcommand{\footrulewidth}{0pt}% footer rule
+    }
+\fi
+%-
+%-> Configure landscape environment
+%-
+\ifartx@lscape%
+\fancypagestyle{lscape}{% landscape layout style
+    \fancyhf{}% clear fields
+    \fancyfoot[CE,CO]{%
+        %- textpos: \begin{textblock}{<blockwidth>}[0.5,0.5](<hpos>,<vpos>) text... \end{textblock}
+        %- origin of the absolute coordinate is the top-left corner of the page
+        %- [0.5,0.5] means reference point of the block is the middle
+        \begin{textblock}{0.1}[0.5,0.5](0.85,0.5){\rotatebox{90}{\footnotesize \thepage}}\end{textblock}% position the page number
+    }
+    %- set units of <blockwidth>, <hpos>, and <vpos> arguments by corresponding modules
+    \setlength{\TPHorizModule}{8.5in}% set to the width of page; NOTE(review): 8.5in x 11in is US letter, but geometry above selects a4paper -- confirm intended
+    \setlength{\TPVertModule}{11in}% set to the height of page; NOTE(review): see width note above
+    \renewcommand{\headrulewidth}{0pt}% header rule
+    \renewcommand{\footrulewidth}{0pt}% footer rule
+}
+\fi
+\ifartx@list% enable enhanced list
+    \definecolor{mygreen}{rgb}{0,0.6,0}% listings comment color
+    \definecolor{mygray}{rgb}{0.5,0.5,0.5}% listings line-number color
+    \definecolor{mymauve}{rgb}{0.58,0,0.82}% listings string color
+    \lstset{%
+        numberbychapter=false,% numbered sequentially or by chapter
+        backgroundcolor=\color{white},% background color;
+        basicstyle=\scriptsize,% font size for code
+        breakatwhitespace=false,% sets if automatic breaks should only happen at whitespace
+        breaklines=true,% sets automatic line breaking
+        captionpos=b,% caption-position to bottom
+        commentstyle=\color{mygreen},% comment style
+        %deletekeywords={...},% delete keywords from the given language
+        frame=single,% adds a frame around the code
+        keepspaces=true,% keeps spaces in text for keeping indentation of code
+        keywordstyle=\color{blue},% keyword style
+        %otherkeywords={*,...},% add more keywords to the set
+        numbers=left,% where to put the line-numbers; possible values are (none, left, right)
+        numbersep=5pt,% how far the line-numbers are from the code
+        numberstyle=\tiny\color{mygray},% the style that is used for the line-numbers
+        rulecolor=\color{black},% if not set, the frame-color may be changed on line-breaks
+        showspaces=false,% show spaces everywhere adding particular underscores;
+        showstringspaces=false,% underline spaces within strings only
+        showtabs=false,% show tabs within strings adding particular underscores
+        stepnumber=2,% the step between two line-numbers. If it's 1, each line will be numbered
+        stringstyle=\color{mymauve},% string literal style
+        tabsize=2,% sets default tabsize to 2 spaces
+        title=\lstname% show the filename of files
+    }
+\fi
+%-
+%-> Page background
+%-
+\ifartx@background%
+    \definecolor{backgroundcolor}{rgb}{0.85,0.85,0.85}% light gray
+    \pagecolor{backgroundcolor}% background color
+\fi
+%---------------------------------------------------------------------------%
+\endinput
+
diff --git a/Style/ucasthesis.cfg b/Style/ucasthesis.cfg
new file mode 100644
index 0000000..7b58a96
--- /dev/null
+++ b/Style/ucasthesis.cfg
@@ -0,0 +1,105 @@
+%---------------------------------------------------------------------------%
+%-                                                                         -%
+%-                  Document Class Configuration                           -%
+%-                                                                         -%
+%---------------------------------------------------------------------------%
+%- Copyright (C) Huangrui Mo <[email protected]>
+%- This is free software: you can redistribute it and/or modify it
+%- under the terms of the GNU General Public License as published by
+%- the Free Software Foundation, either version 3 of the License, or
+%- (at your option) any later version.
+%---------------------------------------------------------------------------%
+%->> Identification
+%---------------------------------------------------------------------------%
+\ProvidesFile{ucasthesis.cfg}[2014/10/01 v1.0 class configuration file]%
+%---------------------------------------------------------------------------%
+%->> Chinese titlepage
+%---------------------------------------------------------------------------%
+\def\ucas@label@ch@confidential{密级:}% "confidentiality level:"
+\def\ucas@label@ch@thesis{}% filled at \maketitle time by degree type
+\def\ucas@label@ch@bacthesis{学士学位论文}% "Bachelor's thesis"
+\def\ucas@label@ch@masthesis{硕士学位论文}% "Master's thesis"
+\def\ucas@label@ch@docthesis{博士学位论文}% "Doctoral thesis"
+\def\ucas@label@ch@author{作者姓名:}% "author name:"
+\def\ucas@label@ch@advisor{指导教师:}% "advisor:"
+\def\ucas@label@ch@degree{学位类别:}% "degree category:"
+\def\ucas@label@ch@major{}% filled at \maketitle time (graduate vs undergraduate)
+\def\ucas@label@ch@grad@major{学科专业:}% "discipline/major:" (graduate)
+\def\ucas@label@ch@und@major{专\quad\quad 业:}% "major:" (undergraduate)
+\def\ucas@label@ch@institute{}% filled at \maketitle time (graduate vs undergraduate)
+\def\ucas@label@ch@grad@institute{培养单位:}% "training institute:" (graduate)
+\def\ucas@label@ch@und@institute{学院(系):}% "school (department):" (undergraduate)
+%---------------------------------------------------------------------------%
+%->> English titlepage
+%---------------------------------------------------------------------------%
+\def\ucas@label@en@statement{A \ucas@value@en@thesistype\ submitted to\\
+    University of Chinese Academy of Sciences\\
+    in partial fulfillment of the requirement\\
+    for the degree of\\
+    \ucas@value@en@degree\ of \ucas@value@en@degreetype\\
+    in\ \ucas@value@en@major}
+%---------------------------------------------------------------------------%
+%->> Structure elements
+%---------------------------------------------------------------------------%
+\def\ucas@label@ch@tocname{目\quad 录}% "Contents"
+\def\ucas@label@en@tocname{Contents}
+\def\ucas@label@ch@lsfigname{图形列表}% "List of Figures"
+\def\ucas@label@en@lsfigname{List of Figures}
+\def\ucas@label@ch@lstabname{表格列表}% "List of Tables"
+\def\ucas@label@en@lstabname{List of Tables}
+\def\ucas@label@ch@algname{算法}% "Algorithm"
+\def\ucas@label@en@algname{Algorithm}
+\def\ucas@label@ch@bibname{参考文献}% "References"
+\def\ucas@label@en@bibname{References}
+\def\ucas@label@ch@bibetal{等}% "et al."
+\def\ucas@label@en@bibetal{et al.}
+\def\ucas@label@ch@biband{和}% "and"
+\def\ucas@label@en@biband{ and }
+\def\ucas@label@ch@axiomname{公理}% "Axiom"
+\def\ucas@label@en@axiomname{Axiom}
+\def\ucas@label@ch@theoremname{定理}% "Theorem"
+\def\ucas@label@en@theoremname{Theorem}
+\def\ucas@label@ch@lemmaname{引理}% "Lemma"
+\def\ucas@label@en@lemmaname{Lemma}
+\def\ucas@label@ch@corollaryname{推论}% "Corollary"
+\def\ucas@label@en@corollaryname{Corollary}
+\def\ucas@label@ch@assertionname{断言}% "Assertion"
+\def\ucas@label@en@assertionname{Assertion}
+\def\ucas@label@ch@propositionname{命题}% "Proposition"
+\def\ucas@label@en@propositionname{Proposition}
+\def\ucas@label@ch@conjecturename{猜想}% "Conjecture"
+\def\ucas@label@en@conjecturename{Conjecture}
+\def\ucas@label@ch@definitionname{定义}% "Definition"
+\def\ucas@label@en@definitionname{Definition}
+\def\ucas@label@ch@examplename{例}% "Example"
+\def\ucas@label@en@examplename{Example}
+\def\ucas@label@ch@remarkname{注}% "Remark"
+\def\ucas@label@en@remarkname{Remark}
+\def\ucas@label@ch@proofname{证明}% "Proof"
+\def\ucas@label@en@proofname{Proof}
+\def\ucas@label@ch@keywords{关键词:}% "Keywords:"
+\def\ucas@label@en@keywords{Key Words:}
+%---------------------------------------------------------------------------%
+%->> Author's declaration
+%---------------------------------------------------------------------------%
+\def\ucas@label@ch@declare@creativity{中国科学院大学\\研究生学位论文原创性声明}% declaration-of-originality heading
+\def\ucas@value@ch@declare@creativity{%
+    本人郑重声明:所呈交的学位论文是本人在导师的指导下独立进行研究工作所取得的成果。
+    尽我所知,除文中已经注明引用的内容外,本论文不包含任何其他个人或集体已经发表
+    或撰写过的研究成果。对论文所涉及的研究工作做出贡献的其他个人和集体,均已在文中
+    以明确方式标明或致谢。
+}
+\def\ucas@label@ch@declare@author{作者签名:}% "author signature:"
+\def\ucas@label@ch@declare@advisor{导师签名:}% "advisor signature:"
+\def\ucas@label@ch@declare@date{日\quad\quad 期:}% "date:"
+\def\ucas@label@ch@declare@rights{中国科学院大学\\学位论文授权使用声明}% authorization-of-use heading
+\def\ucas@value@ch@declare@rights{%
+    本人完全了解并同意遵守中国科学院有关保存和使用学位论文的规定,即中国科学院有权
+    保留送交学位论文的副本,允许该论文被查阅,可以按照学术研究公开原则和保护知识产
+    权的原则公布该论文的全部或部分内容,可以采用影印、缩印或其他复制手段保存、汇编
+    本学位论文。
+}
+\def\ucas@value@ch@declare@rule{涉密及延迟公开的学位论文在解密或延迟期后适用本声明。}% rule for classified/embargoed theses
+%---------------------------------------------------------------------------%
+\endinput
+
diff --git a/Style/ucasthesis.cls b/Style/ucasthesis.cls
new file mode 100644
index 0000000..a61c6d7
--- /dev/null
+++ b/Style/ucasthesis.cls
@@ -0,0 +1,546 @@
+%---------------------------------------------------------------------------%
+%-                                                                         -%
+%-                           Document Class                                -%
+%-                                                                         -%
+%---------------------------------------------------------------------------%
+%- Copyright (C) Huangrui Mo <[email protected]>
+%- This is free software: you can redistribute it and/or modify it
+%- under the terms of the GNU General Public License as published by
+%- the Free Software Foundation, either version 3 of the License, or
+%- (at your option) any later version.
+%---------------------------------------------------------------------------%
+%->> Identification
+%---------------------------------------------------------------------------%
+\NeedsTeXFormat{LaTeX2e}%
+\ProvidesClass{ucasthesis}[2014/10/01 v1.0 LaTeX document class]%
+%---------------------------------------------------------------------------%
+%->> Declare options
+%---------------------------------------------------------------------------%
+%-
+%-> Layout
+%-
+\DeclareOption{singlesided}{% enable single-sided printing
+    \PassOptionsToClass{oneside}{ctexbook}%
+}
+\DeclareOption{doublesided}{% enable double-sided printing
+    \PassOptionsToClass{twoside}{ctexbook}%
+}
+\newif\ifucas@printcopy \ucas@printcopyfalse% default: electronic-copy layout
+\DeclareOption{printcopy}{% enable print copy layout
+    \PassOptionsToClass{twoside}{ctexbook}%
+    \ucas@printcopytrue%
+}
+%-
+%-> Language
+%-
+\newif\ifucas@plain \ucas@plainfalse% plain (non-Chinese) scheme off by default
+\DeclareOption{plain}{% enable plain writing style
+    \PassOptionsToClass{scheme=plain}{ctexbook}%
+    \ucas@plaintrue%
+}
+%-
+%-> Draft version info
+%-
+\newif\ifucas@versioninfo \ucas@versioninfofalse% draft stamp off by default
+\DeclareOption{draftversion}{%
+    \ucas@versioninfotrue%
+}
+%-
+%-> Handle non-implemented options
+%-
+\DeclareOption*{%
+    \PassOptionsToClass{\CurrentOption}{ctexbook}%
+}
+%-
+%-> Terminates all options processing
+%-
+\ProcessOptions\relax%
+%---------------------------------------------------------------------------%
+%->> Load class information
+%---------------------------------------------------------------------------%
+\LoadClass[UTF8,a4paper,zihao=-4]{ctexbook}% zihao=-4: "small four" base size (12pt)
+%---------------------------------------------------------------------------%
+%->> Required packages
+%---------------------------------------------------------------------------%
+\RequirePackage{ifxetex}% LaTeX engine detection
+\RequirePackage{etoolbox}% a toolbox of programming facilities
+\newcommand{\ucasifstreq}{\expandafter\ifstrequal\expandafter}% expansion control: expand the first argument before string comparison
+\newcommand{\ucasifstrbk}{\expandafter\ifblank\expandafter}% expansion control: expand the first argument before blank test
+%---------------------------------------------------------------------------%
+%->> Load class configuration
+%---------------------------------------------------------------------------%
+\AtEndOfPackage{% class cfg loaded after package to make preamble commands take effect
+    \makeatletter
+    \InputIfFileExists{Style/ucasthesis.cfg}{}{}
+    \makeatother
+}
+%---------------------------------------------------------------------------%
+%->> Page layout
+%---------------------------------------------------------------------------%
+%- part one -- horizontal widths
+%- left side width + textwidth + right side width = paperwidth
+%- binding side width + textwidth + nonbinding side width = paperwidth
+%- binding side width of [odd, even] page = [left, right] side width
+%- left side width of [odd, even] page = 1.0in (fixed) + hoffset + [odd, even]sidemargin
+%- assuming A4 paper (210mm x 297mm)
+\setlength{\textwidth}{146.6mm}% set required text width first
+\setlength{\hoffset}{0mm}% set horizontal offset
+\ifucas@printcopy% if print copy layout enabled
+    \setlength{\oddsidemargin}{12.6mm}% binding side margin
+    \setlength{\evensidemargin}{0mm}% ensure uniform binding side width for printing
+\else
+    \setlength{\oddsidemargin}{6.3mm}% left side margin
+    \setlength{\evensidemargin}{6.3mm}% ensure uniform left side width for EThesis
+\fi
+\setlength{\marginparwidth}{35pt}% width of margin notes
+\setlength{\marginparsep}{10pt}% width of space between body text and margin notes
+%- part two -- vertical heights
+%- top height + textheight + bottom height = paperheight
+%- top height = 1.0in (fixed) + voffset + topmargin + headheight + headsep
+\setlength{\textheight}{246.2mm}% set required text height first
+\setlength{\voffset}{-17.4mm}% set vertical offset
+\setlength{\topmargin}{20pt}% vertical margin above header
+\setlength{\headheight}{12pt}% header height
+\setlength{\headsep}{17.5pt}% vertical margin between header and body text
+\setlength{\footskip}{29.5pt}% vertical margin between footer and body text
+%- specifies the amount of space between paragraphs.
+\setlength{\parskip}{0.5ex plus 0.25ex minus 0.25ex}
+%- line spacing
+\linespread{1.5}% line space setting
+\raggedbottom% prevent adding vertical white space in strange places
+%- default pagestyle is page number at bottom without headers and footers
+\pagestyle{plain}
+%---------------------------------------------------------------------------%
+%->> Style control commands
+%---------------------------------------------------------------------------%
+%- redefine cleardoublepage to have page style argument
+\renewcommand{\cleardoublepage}[1][plain]{% optional arg = page style applied to the inserted blank page
+    \clearpage\if@twoside\ifodd\c@page\else%
+    \thispagestyle{#1}%
+    \hbox{}\newpage\if@twocolumn\hbox{}\newpage\fi\fi\fi%
+}
+%- underline
+\ifxetex% use underline from xeCJKfntef
+    \renewcommand{\CJKunderlinecolor}{\color[rgb]{0,0,0}}% set underline color
+    \renewcommand{\uline}[1]{\CJKunderline{#1}}% unified name
+\else% use underline from ulem
+    \RequirePackage{ulem}%
+\fi
+\newcommand{\ulenhance}[2][1pt]{% enhanced underline: [#1]=rule thickness, #2=text
+    \def\ULthickness{#1}% set thickness
+    \uline{#2}}
+\newcommand{\ulhshift}{-4em}% horizontal shift on underline
+\newcommand{\ulextend}[2][350pt]{% extend underline length: [#1]=total width, #2=text
+    \hbox to #1{\hfill\hspace*{\ulhshift}#2\hfill}}
+%---------------------------------------------------------------------------%
+%->> Titlepage
+%---------------------------------------------------------------------------%
+%-
+%-> Chinese item commands
+%-
+\def\ucas@value@ch@confidential{}
+\newcommand{\confidential}[1]{\def\ucas@value@ch@confidential{#1}}% confidentiality level shown on the title page
+\def\ucas@value@ch@schoollogo{}
+\newcommand{\schoollogo}[2]{\def\ucas@value@ch@schoollogo{\includegraphics[#1]{#2}}}% #1=graphicx options, #2=logo file
+\def\ucas@value@ch@title{}
+\def\ucas@value@ch@titlemark{}
+\renewcommand{\title}[2][\ucas@value@ch@title]{% [#1]=running-head title (defaults to the full title), #2=title
+    \def\ucas@value@ch@title{#2}
+    %\def\ucas@value@ch@titlemark{\MakeUppercase{#1}}}
+    \def\ucas@value@ch@titlemark{#1}}
+\renewcommand{\@title}{\ucas@value@ch@titlemark}
+\def\ucas@value@ch@author{}
+\renewcommand{\author}[1]{\def\ucas@value@ch@author{#1}}
+\def\ucas@value@ch@advisor{}
+\newcommand{\advisor}[1]{\def\ucas@value@ch@advisor{#1}}
+\def\ucas@value@ch@advisors{}
+\newcommand{\advisors}[1]{\def\ucas@value@ch@advisors{#1}}% second advisor line on the title page
+\def\ucas@value@ch@degree{}
+\newcommand{\degree}[1]{\def\ucas@value@ch@degree{#1}}
+\def\ucas@value@ch@degreetype{}
+\newcommand{\degreetype}[1]{\def\ucas@value@ch@degreetype{#1}}
+\def\ucas@value@ch@major{}
+\newcommand{\major}[1]{\def\ucas@value@ch@major{#1}}
+\def\ucas@value@ch@institute{}
+\newcommand{\institute}[1]{\def\ucas@value@ch@institute{#1}}
+\def\ucas@value@ch@date{}
+\renewcommand{\date}[1]{\def\ucas@value@ch@date{#1}}
+%-
+%-> Redefine Chinese style
+%-
+\renewcommand{\maketitle}{% Chinese title page; selects labels by \ucas@value@en@degree, so \DEGREE should be set first
+    \ucasifstreq{\ucas@value@en@degree}{Bachelor}{%
+        \def\ucas@label@ch@thesis{\ucas@label@ch@bacthesis}%
+        \def\ucas@label@ch@major{\ucas@label@ch@und@major}%
+        \def\ucas@label@ch@institute{\ucas@label@ch@und@institute}%
+    }{%
+    \ucasifstreq{\ucas@value@en@degree}{Master}{%
+        \def\ucas@label@ch@thesis{\ucas@label@ch@masthesis}%
+        \def\ucas@label@ch@major{\ucas@label@ch@grad@major}%
+        \def\ucas@label@ch@institute{\ucas@label@ch@grad@institute}%
+    }{% fallback branch: doctoral labels
+        \def\ucas@label@ch@thesis{\ucas@label@ch@docthesis}%
+        \def\ucas@label@ch@major{\ucas@label@ch@grad@major}%
+        \def\ucas@label@ch@institute{\ucas@label@ch@grad@institute}%
+    }}%
+    \cleardoublepage
+    \thispagestyle{empty}
+    \begin{center}
+        \linespread{1.5}
+        \zihao{4}\bfseries
+
+        \hfill{} \ucasifstrbk{\ucas@value@ch@confidential}{}{\ucas@label@ch@confidential \ulenhance{\ulextend[50pt]{\hspace*{-\ulhshift}\zihao{5}\ucas@value@ch@confidential}}}
+
+        \vspace*{\stretch{4}}
+
+        {\ucas@value@ch@schoollogo}
+
+        \vspace*{\stretch{2}}
+
+        {\zihao{1}\bfseries\sffamily {\ucas@label@ch@thesis}}
+
+        \vspace*{\stretch{3}}
+
+        {\zihao{-3}\bfseries\sffamily \ulenhance[1.5pt]{\ \ucas@value@ch@title\ }}
+
+        \vspace*{\stretch{3}}
+
+        \def\tabcolsep{1pt}
+        \def\arraystretch{1.3}
+        \begin{tabular}{lc}
+            \ucas@label@ch@author & \ulenhance[1.2pt]{\ulextend{\ucas@value@ch@author}}\\
+            \ucas@label@ch@advisor & \ulenhance[1.2pt]{\ulextend{\ucas@value@ch@advisor}}\\
+            & \ulenhance[1.2pt]{\ulextend{\ucas@value@ch@advisors}}\\
+            \ucas@label@ch@degree & \ulenhance[1.2pt]{\ulextend{\ucas@value@ch@degreetype\ucas@value@ch@degree}}\\
+            \ucas@label@ch@major & \ulenhance[1.2pt]{\ulextend{\ucas@value@ch@major}}\\
+            \ucas@label@ch@institute & \ulenhance[1.2pt]{\ulextend{\ucas@value@ch@institute}}\\
+        \end{tabular}
+
+        \vspace*{\stretch{4.5}}
+
+        {\ucas@value@ch@date}
+
+        \vspace*{\stretch{3.5}}
+    \end{center}
+    \clearpage
+    \if@twoside
+        \thispagestyle{empty}
+        \ifucas@versioninfo
+            \vspace*{\stretch{1}}
+            \begin{footnotesize}
+                \noindent
+                Draft Version (\today)
+            \end{footnotesize}
+        \fi
+        \cleardoublepage[empty]
+    \else
+        \ifucas@versioninfo
+            \thispagestyle{empty}
+            \vspace*{\stretch{1}}
+            \begin{footnotesize}
+                \noindent
+                Draft Version (\today)
+            \end{footnotesize}
+            \cleardoublepage[empty]
+        \fi
+    \fi
+}
+%-
+%-> English item commands
+%-
+\def\ucas@value@en@title{}
+\newcommand{\TITLE}[1]{\def\ucas@value@en@title{#1}}
+\def\ucas@value@en@author{}
+\newcommand{\AUTHOR}[1]{\def\ucas@value@en@author{#1}}
+\def\ucas@value@en@advisor{}
+\newcommand{\ADVISOR}[1]{\def\ucas@value@en@advisor{#1}}
+\def\ucas@value@en@degree{}
+\newcommand{\DEGREE}[1]{\edef\ucas@value@en@degree{\zap@space#1 \@empty}}% expand and remove space, so string tests in \maketitle compare cleanly
+\def\ucas@value@en@degreetype{}
+\newcommand{\DEGREETYPE}[1]{\def\ucas@value@en@degreetype{#1}}
+\def\ucas@value@en@thesistype{}
+\newcommand{\THESISTYPE}[1]{\def\ucas@value@en@thesistype{#1}}
+\def\ucas@value@en@major{}
+\newcommand{\MAJOR}[1]{\def\ucas@value@en@major{#1}}
+\def\ucas@value@en@institute{}
+\newcommand{\INSTITUTE}[1]{\def\ucas@value@en@institute{#1}}
+\def\ucas@value@en@date{}
+\newcommand{\DATE}[1]{\def\ucas@value@en@date{#1}}
+%-
+%-> Redefine English style
+%-
+\newcommand{\MAKETITLE}{% English title page built from the \TITLE/\AUTHOR/... values above
+    \cleardoublepage
+    \thispagestyle{empty}
+    \begin{center}
+        \linespread{1.5}
+        \zihao{4}\bfseries
+
+        \vspace*{50pt}
+
+        {\zihao{-3}\bfseries \ulenhance[1.5pt]{\ \ucas@value@en@title\ }}
+
+        \vspace*{\stretch{2}}
+
+        {\ucas@label@en@statement}
+
+        {By}
+
+        {\ucas@value@en@author}
+
+        {\ucas@value@en@advisor}
+
+        \vspace*{\stretch{3}}
+
+        {\ucas@value@en@institute}
+
+        \vspace*{\stretch{1}}
+
+        {\ucas@value@en@date}
+
+        \vspace*{\stretch{3}}
+    \end{center}
+    \clearpage
+    \if@twoside
+        \thispagestyle{empty}
+        \cleardoublepage[empty]
+    \fi
+}
+%---------------------------------------------------------------------------%
+%->> Author's declaration
+%---------------------------------------------------------------------------%
+\newcommand{\makedeclaration}{% originality and authorization statements with signature lines
+    \cleardoublepage
+    \thispagestyle{empty}
+    {% group limits the \linespread/\zihao scope to this page
+        \linespread{1.5}
+        \zihao{-4}
+
+        \vspace*{2ex}
+
+        \begin{center}
+            {\zihao{4}\bfseries\sffamily \ucas@label@ch@declare@creativity}
+        \end{center}
+
+        {\ucas@value@ch@declare@creativity}
+
+        \vspace*{3ex}
+
+        {\hfill{} {\ucas@label@ch@declare@author \hspace*{14em}}}
+
+        {\hfill{} {\ucas@label@ch@declare@date \hspace*{14em}}}
+
+        \vspace*{6ex}
+
+        \begin{center}
+            {\zihao{4}\bfseries\sffamily \ucas@label@ch@declare@rights}
+        \end{center}
+
+        {\ucas@value@ch@declare@rights}
+
+        {\ucas@value@ch@declare@rule}
+
+        \vspace*{3ex}
+
+        {\hfill{} {\ucas@label@ch@declare@author \hspace*{10em} \ucas@label@ch@declare@advisor \hspace*{9em}}}
+
+        {\hfill{} {\ucas@label@ch@declare@date \hspace*{10em} \ucas@label@ch@declare@date \hspace*{9em}}}
+
+        \vspace*{3ex}
+    }
+    \clearpage
+    \if@twoside
+        \thispagestyle{empty}
+        \cleardoublepage[empty]
+    \fi
+}
+%---------------------------------------------------------------------------%
+%->> New environments
+%---------------------------------------------------------------------------%
+%- define Chinese keywords
+\newcommand{\keywords}[1]{%
+    \vspace{\baselineskip}
+    \noindent {\bfseries \ucas@label@ch@keywords} #1}
+%- define English keywords
+\newcommand{\KEYWORDS}[1]{%
+    \vspace{\baselineskip}
+    \noindent {\bfseries \ucas@label@en@keywords} #1}
+%---------------------------------------------------------------------------%
+%->> Structure elements
+%---------------------------------------------------------------------------%
+\ifucas@plain% plain scheme: English structure names from the cfg
+    \renewcommand{\contentsname}{\ucas@label@en@tocname}
+    \renewcommand{\listfigurename}{\ucas@label@en@lsfigname}
+    \renewcommand{\listtablename}{\ucas@label@en@lstabname}
+    \renewcommand{\bibname}{\ucas@label@en@bibname}
+    \newcommand{\algname}{\ucas@label@en@algname}
+    \newcommand{\bibetal}{\ucas@label@en@bibetal}
+    \newcommand{\biband}{\ucas@label@en@biband}
+    \newcommand{\axiomname}{\ucas@label@en@axiomname}
+    \newcommand{\theoremname}{\ucas@label@en@theoremname}
+    \newcommand{\lemmaname}{\ucas@label@en@lemmaname}
+    \newcommand{\corollaryname}{\ucas@label@en@corollaryname}
+    \newcommand{\assertionname}{\ucas@label@en@assertionname}
+    \newcommand{\propositionname}{\ucas@label@en@propositionname}
+    \newcommand{\conjecturename}{\ucas@label@en@conjecturename}
+    \newcommand{\definitionname}{\ucas@label@en@definitionname}
+    \newcommand{\examplename}{\ucas@label@en@examplename}
+    \newcommand{\remarkname}{\ucas@label@en@remarkname}
+    \renewcommand{\proofname}{\ucas@label@en@proofname}
+\else% Chinese structure names from the cfg
+    \renewcommand{\contentsname}{\ucas@label@ch@tocname}
+    \renewcommand{\listfigurename}{\ucas@label@ch@lsfigname}
+    \renewcommand{\listtablename}{\ucas@label@ch@lstabname}
+    \newcommand{\algname}{\ucas@label@ch@algname}
+    \renewcommand{\bibname}{\ucas@label@ch@bibname}
+    \newcommand{\bibetal}{\ucas@label@ch@bibetal}
+    \newcommand{\biband}{\ucas@label@ch@biband}
+    \newcommand{\axiomname}{\ucas@label@ch@axiomname}
+    \newcommand{\theoremname}{\ucas@label@ch@theoremname}
+    \newcommand{\lemmaname}{\ucas@label@ch@lemmaname}
+    \newcommand{\corollaryname}{\ucas@label@ch@corollaryname}
+    \newcommand{\assertionname}{\ucas@label@ch@assertionname}
+    \newcommand{\propositionname}{\ucas@label@ch@propositionname}
+    \newcommand{\conjecturename}{\ucas@label@ch@conjecturename}
+    \newcommand{\definitionname}{\ucas@label@ch@definitionname}
+    \newcommand{\examplename}{\ucas@label@ch@examplename}
+    \newcommand{\remarkname}{\ucas@label@ch@remarkname}
+    \renewcommand{\proofname}{\ucas@label@ch@proofname}
+\fi
+%---------------------------------------------------------------------------%
+%->> Structure layout
+%---------------------------------------------------------------------------%
+%- chapter
+\ctexset {
+ chapter = {
+ format = \linespread{1.0}\zihao{4}\bfseries\sffamily\centering,
+ number = \arabic{chapter},
+ %numberformat = \rmfamily,
+ aftername = \quad,
+ beforeskip = {7pt},
+ afterskip = {18pt},
+ pagestyle = plain,
+ }
+}
+%- section
+\ctexset {
+ section = {
+ format = \linespread{1.0}\zihao{-4}\sffamily\raggedright,
+ %numberformat = \rmfamily,
+ aftername = \quad,
+ beforeskip = {24pt},
+ afterskip = {6pt},
+ }
+}
+%- subsection
+\ctexset {
+ subsection = {
+ format = \linespread{1.0}\zihao{-4}\sffamily\raggedright,
+ %numberformat = \rmfamily,
+ aftername = \quad,
+ beforeskip = {12pt},
+ afterskip = {6pt},
+ }
+}
+%- subsubsection
+\ctexset {
+ subsubsection = {
+ format = \linespread{1.0}\zihao{-4}\sffamily\raggedright,
+ %numberformat = \rmfamily,
+ aftername = \quad,
+ beforeskip = {12pt},
+ afterskip = {6pt},
+ }
+}
+%- appendix
+\ctexset {
+ appendix = {
+ %numbering = true|false,
+ %number = \rmfamily\Alph{chapter},
+ }
+}
+%---------------------------------------------------------------------------%
+%->> Configure table of contents
+%---------------------------------------------------------------------------%
+%- the table of contents is specified by defining \l@chapter, \l@section, ...
+%- most commands are then defined with the \@dottedtocline:
+%- \@dottedtocline{⟨level⟩}{⟨indent⟩}{⟨numwidth⟩}{⟨title⟩}{⟨page⟩}
+%- ⟨level⟩ an entry is produced only if ⟨level⟩ <= value of the tocdepth counter
+%- note, \chapter is level 0, \section is level 1, etc
+%- ⟨indent⟩ indentation from the outer left margin of the start of the contents line
+%- ⟨numwidth⟩ width of a box in which the section number is to go
+%- <title> title
+%- <page> page number
+
+%- parameters for spacing and length in toc
+\def\@dotsep{1.5mu}% separation between dots in mu units
+\def\@pnumwidth{2em}% width of a box in which the page number is put
+\def\@tocrmarg{2em}% right margin for multiple line entries, \@tocrmarg ≥ \@pnumwidth
+\def\@chaptervspace{1ex}% vertical space between chapter titles
+
+%- redefine dottedtocline from classes.dtx and latex.ltx
+\renewcommand*{\@dottedtocline}[5]{% [<level>,<indent>,<numwidth>,<title>,<page>]
+ \ifnum #1>\c@tocdepth \else
+ \vskip \z@ \@plus.2\p@
+ {\leftskip #2\relax \rightskip \@tocrmarg \parfillskip -\rightskip
+ \parindent #2\relax\@afterindenttrue
+ \interlinepenalty\@M
+ \leavevmode \zihao{-4}\sffamily
+ \@tempdima #3\relax
+ \advance\leftskip \@tempdima \null\nobreak\hskip -\leftskip
+ {#4}\nobreak
+ \leaders\hbox{$\m@th\mkern \@dotsep \cdot\mkern \@dotsep$}\hfill
+ \nobreak
+ \hb@xt@\@pnumwidth{\hfil\normalfont \normalcolor #5}%
+ \par\penalty\@highpenalty}%
+ \fi
+}
+%- redefine l@part from book.cls to add dotted toc line
+\renewcommand*{\l@part}[2]{% [<title>,<page>]
+ \ifnum \c@tocdepth >-2\relax
+ \addpenalty{-\@highpenalty}%
+ \addvspace{2.25em \@plus\p@}%
+ \setlength\@tempdima{3em}%
+ \begingroup
+ \parindent \z@ \rightskip \@pnumwidth
+ \parfillskip -\@pnumwidth
+ {\leavevmode
+ \zihao{4}\sffamily #1
+ \leaders\hbox{$\m@th\mkern \@dotsep \cdot\mkern \@dotsep$}% add dotted toc line
+ \hfil \hb@xt@\@pnumwidth{\hss #2}}\par
+ \nobreak
+ \global\@nobreaktrue
+ \everypar{\global\@nobreakfalse\everypar{}}%
+ \endgroup
+ \fi
+}
+%- redefine l@chapter from book.cls to add dotted toc line
+\renewcommand*{\l@chapter}[2]{% [<title>,<page>]
+ \ifnum \c@tocdepth >\m@ne
+ \addpenalty{-\@highpenalty}%
+ \vskip \@chaptervspace \@plus\p@
+ \setlength\@tempdima{1.5em}%
+ \begingroup
+ \parindent \z@ \rightskip \@pnumwidth
+ \parfillskip -\@pnumwidth
+ \leavevmode \zihao{4}\sffamily
+ \advance\leftskip\@tempdima
+ \hskip -\leftskip
+ #1\nobreak
+ \leaders\hbox{$\m@th\mkern \@dotsep \cdot\mkern \@dotsep$}% add dotted toc line
+ \hfil \nobreak\hb@xt@\@pnumwidth{\hss #2}\par
+ \penalty\@highpenalty
+ \endgroup
+ \fi
+}
+%- redefine indentation for others
+\renewcommand*\l@section{\@dottedtocline{1}{1em}{2em}}
+\renewcommand*\l@subsection{\@dottedtocline{2}{2em}{3em}}
+\renewcommand*\l@subsubsection{\@dottedtocline{3}{3em}{4em}}
+\renewcommand*\l@paragraph{\@dottedtocline{4}{4em}{5em}}
+\renewcommand*\l@subparagraph{\@dottedtocline{5}{5em}{6em}}
+\renewcommand*\l@figure{\@dottedtocline{1}{1em}{2em}}
+\renewcommand*\l@table{\@dottedtocline{1}{1em}{2em}}
+%---------------------------------------------------------------------------%
+\endinput
+
diff --git a/Tex/Appendix.tex b/Tex/Appendix.tex
new file mode 100644
index 0000000..2869d06
--- /dev/null
+++ b/Tex/Appendix.tex
@@ -0,0 +1,24 @@
+\chapter{中国科学院大学学位论文撰写要求}
+
+学位论文是研究生科研工作成果的集中体现,是评判学位申请者学术水平、授予其学位的主要依据,是科研领域重要的文献资料。根据《科学技术报告、学位论文和学术论文的编写格式》(GB/T 7713-1987)、《学位论文编写规则》(GB/T 7713.1-2006)和《文后参考文献著录规则》(GB7714—87)等国家有关标准,结合中国科学院大学(以下简称“国科大”)的实际情况,特制订本规定。
+
+\section{论文无附录者无需附录部分}
+
+\section{测试公式编号} \label{sec:testmath}
+
+\begin{equation} \label{eq:appedns}
+ \adddotsbeforeeqnnum%
+ \begin{cases}
+ \frac{\partial \rho}{\partial t} + \nabla\cdot(\rho\Vector{V}) = 0\\
+ \frac{\partial (\rho\Vector{V})}{\partial t} + \nabla\cdot(\rho\Vector{V}\Vector{V}) = \nabla\cdot\Tensor{\sigma}\\
+ \frac{\partial (\rho E)}{\partial t} + \nabla\cdot(\rho E\Vector{V}) = \nabla\cdot(k\nabla T) + \nabla\cdot(\Tensor{\sigma}\cdot\Vector{V})
+ \end{cases}
+\end{equation}
+\begin{equation}
+ \adddotsbeforeeqnnum%
+ \frac{\partial }{\partial t}\int\limits_{\Omega} u \, \mathrm{d}\Omega + \int\limits_{S} \unitVector{n}\cdot(u\Vector{V}) \, \mathrm{d}S = \dot{\phi}
+\end{equation}
+
+\section{测试生僻字}
+
+霜蟾盥薇曜灵霜颸妙鬘虚霩淩澌菀枯菡萏泬寥窅冥毰毸濩落霅霅便嬛岧峣瀺灂姽婳愔嫕飒纚棽俪緸冤莩甲摛藻卮言倥侗椒觞期颐夜阑彬蔚倥偬澄廓簪缨陟遐迤逦缥缃鹣鲽憯懔闺闼璀错媕婀噌吰澒洞阛闠覼缕玓瓑逡巡諓諓琭琭瀌瀌踽踽叆叇氤氲瓠犀流眄蹀躞赟嬛茕頔璎珞螓首蘅皋惏悷缱绻昶皴皱颟顸愀然菡萏卑陬纯懿犇麤掱暒 墌墍墎墏墐墒墒墓墔墕墖墘墖墚墛坠墝增墠墡墢墣墤墥墦墧墨墩墪樽墬墭堕墯墰墱墲坟墴墵垯墷墸墹墺墙墼墽垦墿壀壁壂壃壄壅壆坛壈壉壊垱壌壍埙壏壐壑壒压壔壕壖壗垒圹垆壛壜壝垄壠壡坜壣壤壥壦壧壨坝塆圭嫶嫷嫸嫹嫺娴嫼嫽嫾婳妫嬁嬂嬃嬄嬅嬆嬇娆嬉嬊娇嬍嬎嬏嬐嬑嬒嬓嬔嬕嬖嬗嬘嫱嬚嬛嬜嬞嬟嬠嫒嬢嬣嬥嬦嬧嬨嬩嫔嬫嬬奶嬬嬮嬯婴嬱嬲嬳嬴嬵嬶嬷婶嬹嬺嬻嬼嬽嬾嬿孀孁孂娘孄孅孆孇孆孈孉孊娈孋孊孍孎孏嫫婿媚嵭嵮嵯嵰嵱嵲嵳嵴嵵嵶嵷嵸嵹嵺嵻嵼嵽嵾嵿嶀嵝嶂嶃崭嶅嶆岖嶈嶉嶊嶋嶌嶍嶎嶏嶐嶑嶒嶓嵚嶕嶖嶘嶙嶚嶛嶜嶝嶞嶟峤嶡峣嶣嶤嶥嶦峄峃嶩嶪嶫嶬嶭崄嶯嶰嶱嶲嶳岙嶵嶶嶷嵘嶹岭嶻屿岳帋巀巁巂巃巄巅巆巇巈巉巊岿巌巍巎巏巐巑峦巓巅巕岩巗巘巙巚帠帡帢帣帤帨帩帪帬帯帰帱帲帴帵帷帹帺帻帼帽帾帿幁幂帏幄幅幆幇幈幉幊幋幌幍幎幏幐幑幒幓幖幙幚幛幜幝幞帜幠幡幢幤幥幦幧幨幩幪幭幮幯幰幱庍庎庑庖庘庛庝庠庡庢庣庤庥庨庩庪庬庮庯庰庱庲庳庴庵庹庺庻庼庽庿廀厕廃厩廅廆廇廋廌廍庼廏廐廑廒廔廕廖廗廘廙廛廜廞庑廤廥廦廧廨廭廮廯廰痈廲廵廸廹廻廼廽廿弁弅弆弇弉弖弙弚弜弝弞弡弢弣弤弨弩弪弫弬弭弮弰弲弪弴弶弸弻弼弽弿彖彗彘彚彛彜彝彞彟彴彵彶彷彸役彺彻彽彾佛徂徃徆徇徉后徍徎徏径徒従徔徕徖徙徚徛徜徝从徟徕御徢徣徤徥徦徧徨复循徫旁徭微徯徰徱徲徳徴徵徶德徸彻徺忁忂惔愔忇忈忉忔忕忖忚忛応忝忞忟忪挣挦挧挨挩挪挫挬挭挮挰掇授掉掊掋掍掎掐掑排掓掔掕挜掚挂掜掝掞掟掠采探掣掤掦措掫掬掭掮掯掰掱掲掳掴掵掶掸掹掺掻掼掽掾掿拣揁揂揃揅揄揆揇揈揉揊揋揌揍揎揑揓揔揕揖揗揘揙揤揥揦揧揨揫捂揰揱揲揳援揵揶揷揸揻揼揾揿搀搁搂搃搄搅搇搈搉搊搋搌搎搏搐搑搒摓摔摕摖摗摙摚摛掼摝摞摠摡斫斩斮斱斲斳斴斵斶斸旪旫旮旯晒晓晔晕晖晗晘晙晛晜晞晟晠晡晰晣晤晥晦晧晪晫晬晭晰晱晲晳晴晵晷晸晹晻晼晽晾晿暀暁暂暃暄暅暆暇晕晖暊暋暌暍暎暏暐暑暒暓暔暕暖暗旸暙暚暛暜暝暞暟暠暡暣暤暥暦暧暨暩暪暬暭暮暯暰昵暲暳暴暵暶暷暸暹暺暻暼暽暾暿曀曁曂曃晔曅曈曊曋曌曍曎曏曐曑曒曓曔曕曗曘曙曚曛曜曝曞曟旷曡曢曣曤曥曦曧昽曩曪曫晒曭曮曯椗椘椙椚椛検椝椞椟椠椡椢椣椤椥椦椧椨椩椪椫椬椭椮椯椰椱椲椳椴椵椶椷椸椹椺椻椼椽椾椿楀楁楂楃楅楆楇楈楉杨楋楌楍榴榵榶榷榸榹榺榻榼榽榾桤槀槁槂盘槄槅槆槇槈槉槊构槌枪槎槏槐槑槒杠槔槕槖槗滙滛滜滝滞滟滠滢滣滦滧滪滫沪滭滮滰滱渗滳滵滶滹滺浐滼滽漀漃漄漅漈漉溇漋漌漍漎漐漑澙熹漗漘漙沤漛漜漝漞漟漡漤漥漦漧漨漪渍漭漮漯漰漱漳漴溆漶漷漹漺漻漼漽漾浆潀颍潂潃潄潅潆潇潈潉潊潋潌潍潎潏潐潒潓洁潕潖潗潘沩潚潜潝潞潟潠潡潢潣润潥潦潧潨潩潪潫潬潭浔溃潱潲潳潴潵潶滗潸潹潺潻潼潽潾涠澁澄澃澅浇涝澈澉澊澋澌澍澎澏湃澐澑澒澓澔澕澖涧澘澙澚澛澜澝澞澟渑澢澣泽浍澯澰淀澲澳澴澵澶澷澸潇潆瀡瀢瀣瀤瀥潴泷濑瀩瀪瀫瀬瀭瀮瀯弥瀱潋瀳瀴瀵瀶瀷瀸瀹瀺瀻瀼瀽澜瀿灀灁瀺灂沣滠灅灆灇灈灉灊灋灌灍灎灏灐洒灒灓漓灖灗滩灙灚灛灜灏灞灟灠灡灢湾滦灥灦灧灨灪燝燞燠燡燢燣燤燥灿燧燨燩燪燫燮燯燰燱燲燳烩燵燵燸燹燺薰燽焘燿爀爁爂爃爄爅爇爈爉爊爋爌烁爎爏爑爒爓爔爕爖爗爘爙爚烂爜爝爞爟爠爡爢爣爤爥爦爧爨爩猽猾獀犸獂獆獇獈獉獊獋獌獍獏獐獑獒獓獔獕獖獗獘獙獚獛獜獝獞獟獠獡獢獣獤獥獦獧獩狯猃獬獭狝獯狞獱獳獴獶獹獽獾獿猡玁玂玃。
diff --git a/Tex/Backmatter.tex b/Tex/Backmatter.tex
new file mode 100644
index 0000000..4bf7151
--- /dev/null
+++ b/Tex/Backmatter.tex
@@ -0,0 +1,40 @@
+\chapter[致谢]{致\quad 谢}\chaptermark{致\quad 谢}% syntax: \chapter[目录]{标题}\chaptermark{页眉}
+\thispagestyle{noheaderstyle}% 如果需要移除当前页的页眉
+%\pagestyle{noheaderstyle}% 如果需要移除整章的页眉
+
+时间飞逝,很快就到了硕士毕业的时候了。借此论文完成的机会,在此向多年来给我关心和帮助的老师、同学、家人和朋友们表示衷心的感谢!
+
+首先,要感谢我的父母,父母为我付出了毕生的精力,将我抚养成人。在我最困难的时候帮助我,虽然他们都不是科研人员,也没有很高的学术水平。但无论是科研上的烦心事,还是生活上的琐事,都会认真倾听,帮我解决问题。即使解决不了也会开导我。
+
+接着向我敬爱的小导师——郑超老师表达我的感谢之情。郑超老师严谨认真、学识渊博、思维敏捷且富有激情。在我硕士在读期间,郑超老师对我的研究生课程学习、课题研究等方面给出了很多的指导和帮助,让我开阔了思路,培养了独立进行研究和工程设计的思维和基本素养。
+
+其次,我要感谢组长和我的导师刘庆云老师在研究生阶段对我的帮助。刘老师视野开阔、知识渊博,并且十分关心学生的成长。每次都能发现学生工作、学习中的问题,并提出十分有指导意义的意见。
+
+感谢第二研究室处理架构组的周舟老师、杨嵘老师、朱宇佳老师、李佳老师、李钊老师、杨威老师等。在实验室里老师们对每一个学生都提供了真心的帮助,在科研学习上,能得到他们的帮助,让我受益颇多。我也会学习各位老师对科研锐意进取、对生活充满热情的学习生活态度,在今后的工作生活中继续进步。
+
+感谢在实验室给了我很大帮助的师兄师姐们:陆秋文、张曦、谭国林、黄彩云,他们在我最迷茫无助的时候给了我很多的帮助。特别是陆秋文师兄,在工作压力很大的情况下,对实验室的每个师弟师妹都给与了很多帮助,看到的有价值的知识、文章也第一时间分享给师弟师妹们,给我树立了优秀的榜样。感谢组里的崔一鸣、罗成志、李白杨师弟,邹雨婷、柳幼婷、周钊宇、王钰、尚晶晶、马小青师妹,在日常学习和生活中的陪伴。
+
+还要感谢同一届的李杰、贺文梁、张中一、杨兴东、张晓欧、田湘、钟友兵同学,在三年研究生生活中他们是陪伴我最多的人,虽然大家的研究方向各不相同,但一直在一起拼搏、一起奋斗,总找机会一起分享学习上的经验和成果。
+
+\begin{flushright}
+陈冠林
+
+2019年6月写于国科大
+\end{flushright}
+\chapter{作者简历及攻读学位期间发表的学术论文与研究成果}
+
+\section*{作者简历}
+
+\subsection*{本文作者}
+陈冠林,安徽省合肥市人,中共预备党员,中国科学院信息工程研究所硕士研究生。
+
+2012.9——2016.6 北京理工大学~~软件学院
+
+2016.9——2019.6 中国科学院信息工程研究所
+\subsection*{完成项目}
+去重降载专项
+
+\section*{指导老师简介}
+刘庆云,男,1980年生,博士,ACM和IEEE会员,中国计算机学会会员,2010年获得新疆科技进步一等奖,2012年入选信息工程研究所(下简称信工所)首届青年之星,2013年获得信工所重大科技进展奖。现任信工所高级工程师,硕士生导师,信息智能处理技术研究室(第二研究室)处理架构组组长,主要研究方向为信息内容安全,侧重网络安全系统体系架构、网络内容深度分析与检测理论与技术。作为主要负责人,先后承担国家863计划重大项目、国家242信息安全计划项目以及工信部、发改委等多项国家级重大信息安全科研项目。主持研发的多个系统在国家重要部门实际应用效果显著。近三年发表论文20篇,申请专利12篇,软著6项。
+\cleardoublepage[plain]% 让文档总是结束于偶数页,可根据需要设定页眉页脚样式,如 [noheaderstyle]
+
diff --git a/Tex/Chap_1.tex b/Tex/Chap_1.tex
new file mode 100644
index 0000000..c0dd36f
--- /dev/null
+++ b/Tex/Chap_1.tex
@@ -0,0 +1,208 @@
+\chapter{绪论}\label{chap:introduction}
+
+本章首先介绍面向网络流量的重复音视频文件实时检测系统的研究背景、意义和系统要求,并对国内外研究现状进行详细分析。最后简要介绍本文的主要组织架构。
+
+\section{研究背景及意义}
+
+随着互联网的发展,网络已经渗入到我们生活的方方面面,越来越多的人使用网络观看和下载音视频节目。根据中国互联网信息中心(CNNIC)发布的第42次《中国互联网络发展状况统计报告》\cite{CNNIC2018}显示,截至2018年6月,如表~\ref{tab:Numberofuser}~所示,我国的网民规模达8.02亿人,其中网络视频用户约为6.09亿人,较半年前增长了5.2\%,网络音乐用户约为5.55亿人,较半年前增长了1.2\%。
+
+\begin{table}[!htbp]
+ \bicaption{网民人数以及音视频用户人数}{Number of netizens and number of multimedia netizens}
+ \label{tab:Numberofuser}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{lccc}
+ \hline
+ &2017.12用户规模(万)&2018.6用户规模(万)&半年增长率\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+        网民总数&77198&80166&3.8\%\\
+ 网络视频用户&57892&60906&5.2\%\\
+ 网络音乐用户&54809&55482&1.2\%\\
+ \hline
+ \end{tabular}
+\end{table}
+
+其中网络直播和短视频用户飞速增长,2017年起,尤其在2018年春节期间,短视频应用迅速下沉至三、四线城市,用户规模持续增长。截至2018年6月,综合各个热门短视频应用的用户规模达5.94亿人,网络直播的用户规模也达到4.25亿人,较去年年末增长294万人。
+
+随着通过网络观看音视频节目的人数不断增加,音视频流量也早已成为网络流量中占比最高的流量,根据Sandvine发布的2018年全球互联网观察\cite{PhenomenaReport}显示,如表~\ref{tab:Differentapplication}~所示,视频流量已经占据全球流量的57.69\%,较去年增长了22.43\%。
+
+\begin{table}[!htbp]
+ \bicaption{不同应用网络流量占比}{Global application category traffic share}
+ \label{tab:Differentapplication}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{lcc}
+ \hline
+ 应用名&流量占比&全年增长率\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ 视频流&57.64\%&22.43\%\\
+ 网页&17.01\%&20.98\%\\
+ 游戏&7.78\%&2.68\%\\
+ 社交媒体&5.10\%&3.73\%\\
+ 电子商务&4.61\%&1.90\%\\
+ 文件分享&2.84\%&22.05\%\\
+ 即时通讯&1.72\%&8.12\%\\
+ 安全&1.41\%&7.48\%\\
+ \hline
+ \end{tabular}
+\end{table}
+
+美国下载流量前十位的应用中,有四种应用提供音视频服务,如表~\ref{tab:Differentdownload}~所示,分别为Netflix,YouTube,HTTP media stream,Amazon Prime。
+
+\begin{table}[!htbp]
+ \bicaption{美国下载流量排行}{US application traffic share}
+ \label{tab:Differentdownload}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{lcc}
+ \hline
+ 应用名&流量占比&全年增长率\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ Netflix&14.97\%&2.92\%\\
+ HTTP media stream&13.07\%&4.84\%\\
+ YouTube&11.35\%&3.03\%\\
+ Raw MPEG-TS&4.39\%&4.11\%\\
+ HTTP (TLS)&4.06\% &2.06\%\\
+ QUIC&3.87\%&1.43\%\\
+ Amazon Prime&3.69\%&0.87\%\\
+ HTTP download&3.69\%&1.45\%\\
+ HTTP&3.22\%&4.80\%\\
+ PlayStation download&2.67\%&0.45\%\\
+ \hline
+ \end{tabular}
+\end{table}
+
+根据\cite{CiscoReport},到2022年全球音视频流量将占到全球网络流量的82\%,高于2017年的75\%。从2017年到2022年,互联网视频流量将增长四倍,复合年增长率约为33\%。
+
+根据\cite{Yu}发表的论文,发现大部分的视频用户会重复下载观看同一部电影,且YouTube流量中约30\%是重复的,如图~\ref{fig:youtube}~所示。可见,随着音视频流量的不断增多,大量重复的音视频流量通过网络进行传播。
+\begin{figure}[!htbp]
+ \centering
+ \includegraphics[width=0.80\textwidth]{youtube}
+ \bicaption{PACK网络冗余检测结果}{PACK redundancy elimination ratio}
+ \label{fig:youtube}
+\end{figure}
+
+代理服务器,入侵检测系统等网络中间件需要通过处理分析流量,以达到版权保护、色情检测等目的。然而对这些重复流量的处理消耗了这些网络中间件大量带宽,存储和处理资源,同时也降低了整个网络性能。如果这些网络中间件能在音视频流量建立连接时,对这些重复的音视频文件进行检测,就可以提高网络性能并节约服务器资源。
+
+综上所述,在网络流中识别重复的音视频文件具有重要意义。针对网络流量中音视频文件的重复传输问题,本文提出一个准确性高的、实时的重复音视频文件实时检测系统。
+
+\section{系统要求}
+面向网络流量的重复音视频文件实时检测系统,其有两个要求:
+
+1.检测的实时性:为了在网络流量中检测重复的音视频文件,从而减少代理服务器等网络中间件的消耗,要尽可能在传输结束前完成检测。选取的特征应主要为音视频流量头部的信息,以保证检测的实时性。
+
+2.算法的准确性:由于代理服务器、入侵检测系统等网络中间件需要对网络流量进行分析从而达到版权保护和色情检测的目的。因此对于音视频文件的重复性检测,其对准确性的要求较高。
+
+\section{国内外研究现状}
+\subsection{研究现状概述}
+在网络流量中对重复的音视频文件实时检测主要可以分为两种方法:
+
+第一种只分析网络流量,不进行音视频文件的编解码,即网络冗余发现系统,现大量应用于云计算,企业内部网络等,其优点是大大减少了网络的建设成本,缺点是无法检测相似的网络流量。 如:\cite{Spring2000}提出一种无需对网络协议进行解析的网络冗余发现系统,该论文首次提出一种端到端的模型,并使用拉宾指纹对传输的流量分块。
+
+还有一些网络冗余发现系统带有一定的预测功能。\cite{Zohar2014}提出一种具有预测性的网络冗余发现算法,该方法与前人提出的基于端到端网络冗余发现系统的不同点在于,该方法不要求服务器持续维护客户端状态(紧密的缓存同步问题),大大地减少了服务器的开销。
+
+网络冗余发现系统被用于不同的网络细分领域,与使用的场景密切结合,用于减少该场景下资源的消耗。\cite{Lei2013Cooperative}提出了一种用于云计算中的网络冗余发现,该系统即能够发现短冗余又能发现长冗余。\cite{Aggarwal2010EndRE}提出的网络冗余发现系统被用于广域网的优化。\cite{Sanadhya2012}将现有的网络冗余发现技术用于手机中。\cite{Halepovic2012}在数据链路层解决无线局域网中的网络冗余问题,在该论文中也首次提出在无线网络中发现网络冗余的四个挑战。
+
+第二种是对音视频文件编解码,即大规模相似音视频检索( NDVR )系统,现大量应用于版权保护,特定视频检测等领域。其优点是检测的准确率较高,缺点是消耗大量的资源,无法实时地检测视频的相似性。如\cite{Hao2016Stochastic}为了提高检索相似音视频的准确性和速度,提出了一种新的随机多视图哈希算法,以促进大规模相似音视频检索( NDVR )系统的构建。最后使用两个公开的相似音视频数据集对该系统进行检验,与各种最先进的NDVR系统进行比较有着较明显的优势。
+
+下面我将按照网络的分层结构,详细介绍网络冗余发现系统。
+\subsection{数据链路层的网络冗余发现系统}
+2010年SIGCOMM上,\cite{Lumezanu2010The}等人发表的一篇论文提出了一种针对蜂窝数据网络的冗余发现和消除方法。在该篇论文中,作者在蜂窝网络中部署网络冗余发现系统,可以大大减少运营商的网络带宽成本。由于蜂窝链路具有较高的丢包率,给整个网络冗余发现系统带来了巨大的挑战。该论文作者提出了一种轻量级机制,来应对此问题。作者将该系统部署在两个北美和一个欧洲运营商处,实验表明该网络冗余发现系统可以将大多数移动用户的带宽消耗降低至少约10\%,其中部分带宽消耗减少高达50\%,取得了良好的效果。
+
+\cite{Halepovic2012}在2012年ICC上发表的一篇论文提出了一种针对无线局域网链路层的网络冗余发现系统。在论文中,作者证明了在无线局域网中部署网络冗余发现系统是可行的,并将其和使用在以太网中的网络冗余发现系统进行了比较。作者提出了无线局域网网络冗余发现系统所面临的两个挑战:一是链路的不稳定性,在数据链路层会出现大量的帧丢失现象。二是无线局域网的数据包中包含大量管理和控制帧,导致IP数据的比例下降。作者通过实验证明,虽然网络消冗系统在无线局域网中的效果并没有在以太网中那么明显,但依旧可以节约23\%的带宽消耗。主要的原因是数据链路层需要对丢失的帧进行再重传,产生了大量的额外流量。
+
+\subsection{网络层的网络冗余发现系统}
+
+为了避免对大量的帧丢失进行处理,大量的网络冗余发现系统都工作在网络层和应用层。\cite{Spring2000}提出了一种识别重复数据传输的技术,并用它来分析网络流量中的冗余。整个系统分成发送端、接收端和存储数据块及其指纹的数据集,指纹数据集需要在发送端和接收端保持同步。该系统是一个典型的基于端到端模型的网络冗余发现系统,系统所使用的模型和算法思想也影响了许多后人的工作。发送端的流程如下:第一步:发送端对网络流量基于内容进行分块,使用了滑动窗口技术,用拉宾指纹算法来确定网络流量分块的边界。使用拉宾指纹的目的是为了避免不变的分块对数据的更改十分敏感。其原理如下图~\ref{fig:Content-based}~所示。第二步:每次计算给定滑动窗口W内的数据内容的指纹FP(如W),并基于最大给定的大整数D取模后,与给定的余数进行了对比,若相等(如W2)则窗口的右端为数据划分边界,否则(如W1)将窗口继续往右滑动一个字节,依此循环地进行计算和对比,直到达到文件末尾。基于内容分块可以将更改对数据划分的影响控制在更新位置附近的少数几个分块内,并保持其他分块不变。第三步:发送端将分块后的数据在数据集中进行查询,若在数据集中查询到该分块,则将该分块替换成其在数据集中对应的指纹。若在数据集中查询不到,则使用新的指纹对需要传输的数据块进行标记,并存储在数据集中。接着将新指纹和需要传输的数据块一起发给接收端,以保证接收端的数据集的更新。接收端的流程如下:接收端收到发送端的网络流量后,需要对网络流量进行还原,若是新指纹,则记录在数据集中。若是旧指纹,则对指纹进行还原,填充相应的数据块。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{Content-based}
+ \bicaption{基于内容分块原理}{Content-Defined Chunking approach}
+ \label{fig:Content-based}
+\end{figure}
+
+\cite{Yu}提出了PACK压缩算法,实现了一个专门为云计算设计的网络冗余发现系统。整个系统也采用了典型的基于端到端的模型,但本文提出了一种新的网络冗余发现算法PACK,不同于以往的算法,PACK有一定的预测性质。PACK也减少了发送端的开销,无需发送端和接收端紧密缓存两个数据集。简单介绍一下PACK算法。PACK接收端算法步骤如下,当接收端接收到发送端发送的数据分块,会在数据集中查询。若在数据集中查询到该数据分块,则返回数据集中该数据分块的下一块数据指纹,即PRED,并等待发送端回复PRED-ACK。若发送端回复PRED-ACK,则认为下一块数据已经接收,以此类推。若在数据集中查询不到,告知发送端继续发送。PACK发送端算法如下,发送端将需要发送的信息用弱哈希进行分块,并生成指纹,在发送过程中会不断接收PRED,将其中包含的指纹与将要发送的分块数据的指纹进行对比,若相同,则返回PRED-ACK。若不相同,则返回数据块。PACK的启发是,文件的重复是可预测的,文件的部分内容和整个文件是有相关性的,可用文件的部分内容标识整个文件。
+
+\subsection{应用层的网络冗余发现系统}
+
+在2012年的NSDI上,\cite{Aggarwal}发表一篇文章,提出一种不同于中间件的网络冗余发现系统,可适用于加密流量和最后一跳是移动设备的应用场景,使用了一种新的优于拉宾指纹的算法。该算法无论在缓存开销方面还是在CPU占用方面都优于以往的算法,缺点是无法发现较短的冗余。该系统需要在用户的电子设备额外安装一个客户端,客户端加解密工作是由原本的TLS协议完成,因此该系统只是适用于企业内部网络,以减少企业在网络建设上的消耗。该篇论文中提到了基于中间件的网络冗余发现系统的缺点,主要包含以下两点:1)随着HTTPS的普及,越来越多的网站使用全站加密,基于中间件的网络冗余发现系统,不能很好地应对加密流量。2)随着移动互联网的兴起,越来越多的人使用手机,基于中间件的网络冗余发现系统无法提高移动设备中最后一跳链路的性能。
+
+从2012年起,对移动设备的网络冗余发现也越来越得到了人们的关注。\cite{Feng2013How}发表一篇文章提出了一种组合不同的网络冗余发现和消除技术的方法,如:增量编码,网络缓存等。在移动网络中减少了约30\%的流量。本文的作者调查了移动设备上的HTTP缓存使用情况,发现移动设备在Web缓存,协议规范等方面与电脑设备之间存在巨大差距。完全支持HTTP缓存协议并且严格遵循该协议的智能手机应用程序和移动浏览器,可以减少约17\%左右的移动流量。
+
+\subsection{国内外研究现状小结}
+国内外研究现状的小结,见表~\ref{tab:Summaryofresearch}~所示,这些研究成果要么无法对重复的网络流量进行实时检测,要么都是基于端到端系统,无法在网络中间件中使用。基于音视频内容的NDVR系统消耗大量的资源,且实时性较差,并不适合在网络传输环境中使用。但这些研究成果却有许多可借鉴之处:一是系统应充分考虑网络传输中丢包,乱序等特性对检测结果的影响。二是系统应充分考虑不同网络设备的适用。三是系统应充分考虑内存消耗的问题,可以在内存集约的情况下工作。四是文件的部分内容和整个文件是有相关性的,可用文件的部分内容标识整个文件。
+\begin{table}[!htbp]
+ \bicaption{国内外研究现状小结}{Summary of research status}
+ \label{tab:Summaryofresearch}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabularx}{\textwidth}{lccX}
+ \hline
+ 研究方法&代表工作&会议&创新点\\
+ \hline
+ \multirow{2}*{链路层}&RTE in WLAN&ICC'12&解决了数据帧丢失和IP数据比例下降问题。\\
+ ~ & Effect of Packet loss & SIGCOMM'10 & 首次将网络冗余发现系统应用于蜂窝数据网络。 \\
+ \multirow{2}*{网络层}&Protocol-independent RE Algorithms&SIGCOMM'00&首次提出基于端到端的网络冗余发现系统,且可应用于任何非加密流量。\\
+ ~ & end to end TRE & ICNP'12 & 可适用于不同网络设备,专门为云计算设计的网络冗余检测和消除算法。 \\
+ \multirow{3}*{应用层}&EndRE&NSDI'12&适用于加密流量和最后一跳是移动设备的应用场景,同时使用了一种新的优于拉宾指纹的算法。\\
+ ~ &Combination of different techniques&PAM'13&组合不同的网络冗余发现和消除技术的方法,如:增量编码,网络缓存等。在移动网络中减少了约30\%的流量。\\
+ ~ &PACK&TON'14&提出了一种具有预测性的网络冗余检测系统,大大减少了服务器端的压力。\\
+ 基于内容&NDVR&TMM'17&提出了一种新的随机多视图哈希算法,以促进大规模相似音视频检索( NDVR )系统的构建。\\
+ \hline
+ \end{tabularx}
+\end{table}
+% 最后作者采用了现有的四种冗余发现方法,分别采用了HTTP缓存,文件压缩,增量编码和数据包流压缩(MODP),最终减少了约30\%的手机流量。
+% 其结果如图\ref{fig:Mobiledevice}所示。
+
+% \begin{figure}[!htbp]
+% \centering
+% \begin{subfigure}[a]{0.90\textwidth}
+% \includegraphics[width=\textwidth]{Mobiledevice1}
+% \caption{}
+% \label{fig:Mobiledevice1}
+% \end{subfigure}%
+% \\~% add desired spacing
+% \begin{subfigure}[b]{0.80\textwidth}
+% \includegraphics[width=\textwidth]{Mobiledevice2}
+% \caption{}
+% \label{fig:Mobiledevice2}
+% \end{subfigure}
+% \bicaption{移动设备流量减少结果}{Mobilephone traffic reduce results}
+% \label{fig:Mobiledevice}
+% \end{figure}
+\section{论文的主要内容与组织结构}
+\subsection{论文的主要内容}
+本文的研究目标:针对网络中间件消耗大量资源处理重复音视频文件的问题,设计并实现面向网络流量的重复音视频实时检测系统。
+
+本文实现的系统包含两个关键技术:1)预测性文件标识生成方法。该技术在流量建立连接时,生成一个具有预测性的文件标识。主要步骤如下:选取流量头部的一些特征,通过MD5等哈希算法,生成文件标识。2)基于流式模糊哈希的重复音视频检测方法。主要步骤如下:在数据集中查询文件标识,同时计算流量的流式模糊哈希值,用于验证标识的准确性,指导标识的生成。
+
+实验证明,该系统可以去除约60\%的重复文件,不影响内容分析模块特定音视频文件检测的准确率。
+\subsection{组织结构}
+论文内容共分为六章,每章的安排如下:
+
+第1章 绪论。本章简要介绍了论文的背景和研究意义,对国内外的研究现状进行全面详细分析,最后简要介绍本文的研究内容和组织结构。
+
+第2章 前人的工作基础。本章介绍了流式模糊哈希和基于流式模糊哈希的相似性查找系统的原理,并分析各自的使用原因,和优点。
+
+第3章 预测性文件标识生成方法。本章介绍了预测性文件标识生成方法,通过特征筛选生成了6个候选的文件标识。
+
+第4章 基于流式模糊哈希的重复音视频检测方法。本章介绍了该方法的步骤,通过实验证明了该方法的可行性,实验的结果指导了文件标识的生成。
+
+第5章 系统设计与实现。本章介绍了重复音视频实时检测系统的设计与实现,并在某公司网关对整个系统进行了测试。
+
+第6章 总结与展望。本章对本文的工作进行全面总结,并对未来的工作进行了展望。
+
+\section{小结}
+本章首先介绍了本文的研究背景和意义,大量重复的音视频文件通过网络进行传输,对这些重复的文件进行检测可以减少网络中间件的消耗,提高整个网络性能。接着提出了重复音视频实时检测系统的两个要求:一是检测实时性。二是算法准确性。然后介绍了国内外的研究现状,并按网络分层的顺序对网络冗余发现系统进行了详细介绍。最后简要介绍了本文的研究内容和组织结构。 \ No newline at end of file
diff --git a/Tex/Chap_2.tex b/Tex/Chap_2.tex
new file mode 100644
index 0000000..bf2700a
--- /dev/null
+++ b/Tex/Chap_2.tex
@@ -0,0 +1,127 @@
+\chapter{前人的工作基础}\label{chap:introduction}
+
+本章首先详细介绍整个系统所使用的两个前人的研究成果,分别是流式模糊哈希和基于流式模糊哈希的相似性查找系统。接着介绍使用这些研究成果的原因和各自的优点。
+
+\section{使用流式模糊哈希的原因及其优点}
+在网络流量中识别重复的音视频文件,需要一个基准来识别文件是否相同。根据\cite{aoli2010}的论文,一般使用MD5对文件是否相同进行检测。MD5是一种很常见的哈希算法,哈希算法从数学上看是一种函数映射,其可以将任意大小的文件或者任意长度的字符串映射成一个固定长度的字符串。
+
+但在真实的网络环境中,由于人们的观看习惯,例如拖动在线视频的进度条,或者手动关闭视频网页,导致网络中传输的音视频文件常常会出现缺损,因此,对于同一个音视频文件版本,很难出现两个完全相同的文件,往往需要对文件的相似性进行检测。
+
+普通哈希算法具有雪崩效应。当文件中的任意一个比特位发生改变时,会导致生成的哈希值完全不同。虽然这一特性对于检测文件是否缺损,或者检测文件是否改变有着重要作用,但却无法对于两个文件进行相似性检测。
+
+为了能够对文件的相似性进行检测,提出了模糊哈希算法。和普通哈希相比较,模糊哈希可以比较两个不同版本文件的相似度,其主要使用在计算机数字取证和恶意代码分析领域。模糊哈希的特点如下:
+
+非传播(Non-propagation):只有与二进制的变化部分线性对应的模糊哈希值部分才会被更改。这意味着文件中任何部分的微小变化都不会引起哈希值急剧变化。
+
+对齐鲁棒性(Alignment robustness):大多数哈希算法都对两个文件是否对齐十分敏感。仅仅向文件中删除或插入单个字节将导致最后生成两个完全不同的哈希值。模糊哈希算法的核心是用于在二进制中生成一系列重置点的弱哈希(rolling hash)。且该重置点仅取决于直接上下文。
+
+下面简单介绍一下生成模糊哈希的步骤,如公式~\ref{eq:chufa}~所示,模糊哈希使用名为块大小$b$的变量来触发重置点,初始块大小可以用该公式计算,这可以确保某个文件的模糊哈希结果长度既不会太长不能比较,也不会太短从而避免冲突。$b_{min}$是常量最小块大小,$S$是常量预期模糊散列长度,$n$是输入文件大小。
+
+\begin{equation} \label{eq:chufa}
+b_{init}=b_{min}2^{\lfloor \log_2(\frac{n}{Sb_{min}}) \rfloor}
+\end{equation}
+
+窗口大小为$k$的弱哈希函数作用于$k$字节的输入序列$c_1,c_2,\ldots,c_k$,当满足公式~\ref{eq:rollinghash}~时,重置点将位于$c_k$。从统计上看,$b$值越小,触发的重置点越多。然后使用基于FNV算法的强哈希来产生两个重置点之间区域的哈希值。最后将来自每个重置点FNV哈希中的字符进行串联,得到最终的模糊哈希值。
+
+\begin{equation} \label{eq:rollinghash}
+\mathrm{rolling\ hash}(c_1,c_2,\ldots,c_k) \bmod b = b - 1
+\end{equation}
+
+模糊哈希算法已被应用于许多领域,因为它能够比较两个不同的文件并确定其相似性。然而,在网络流量中使用模糊哈希来识别正在传输中的相似文件时,却遇到了许多问题和挑战:
+
+1) 无法确定文件长度:文件长度可能在文件传输结束之前无法确定。例如,在HTTP 1.0及之前版本中,content-length字段是可有可无的。在HTTP1.1及之后版本。如果是keep alive模式,则content-length和chunk字段二选一。若是非keep alive模式,则和HTTP1.0一样,content-length可有可无。文件长度虽然对于像MD5和SHA-1这样的哈希算法可有可无。但是,根据生成模糊哈希的步骤,文件长度是模糊哈希生成的关键参数之一,用于生成之前提到的触发值以生成哈希片段。
+
+2)流式处理:为了生成适当长度的签名,模糊哈希需要完整的文件,但将网络中传输的音视频文件完全存储在硬盘上是不切实际的,这样会消耗网络中间件大量的存储资源。
+
+3)无序输入:现在的传输技术会将文件分成片段以实现传输的高效性和灵活性,例如多线程下载,P2P文件共享和网络中继服务(例如百度云上传文件)。 如图~\ref{fig:Multi-threaded}~是典型的多线程传输场景,其中灰色块表示文件片段。在时间t3,可能出现0-3M范围内的任何文件片段。 模糊哈希算法只能从文件头或复位点进行计算,因此必须缓冲无序片段,直到收到所有前面的数据。在最坏的情况下,几乎整个文件都被缓冲,这使得内存消耗十分巨大。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{Multi-threaded}
+ \bicaption{多线程传输场景}{Multi-threaded transmission scenario}
+ \label{fig:Multi-threaded}
+\end{figure}
+
+
+4)不完全捕获:由于数据包丢失或处理错误,从网络流量捕获的文件不完整是一种常见的结果。
+
+针对以上问题,本文采用流式模糊哈希算法以生成流量摘要。该算法采用存储中间计算结果的方法进行流式数据摘要计算,能够处理数据缺损、乱序和重叠的情况,并且采用伽罗华域(Galois Field)的乘法运算作为强哈希算法减少内存占用。
+
+该算法有以下优点:
+
+1)可以生成流式数据的摘要,用于网络流量的相似性检测。引入多个区间数据索引以及中间状态,以满足复杂的网络环境中数据块的无序输入。
+% 流式模糊哈希处理乱序的流量输入结构图如图\ref{fig:handleout-of-order}:
+
+% \begin{figure}[!htbp]
+% \centering
+% %trim option's parameter order: left bottom right top
+% \includegraphics[width=0.80\textwidth]{handleout-of-order}
+% \bicaption{处理乱序结构示意图}{Handling out-of-order structure}
+% \label{fig:handleout-of-order}
+% \end{figure}
+
+2)可以在内存集约的环境下工作,从而降低代理服务器、入侵检测系统等网络中间件的开销。
+
+3)只要对流量处理一次就可以生成模糊哈希值,无需对海量的流量进行存储。
+
+典型的流式模糊哈希值如下所示:第一个是完整数据生成的,第二个是不完整数据生成的。
+
+\begin{lstlisting}[title=流式模糊哈希值, frame=shadowbox]
+5Howh395h39h/ZF9e3Tg6S9d8CY9QXA4A5A4z48ZQdVZcuXVBAIkV2R3prfywFlL8xwY2[0:36746924]
+
+CKStpol/OlnrBBIphB05v1E5qe[0:490021]Y8mWpVC3no6pzoEP6QCCMfZlowXRKI1JoLb[523900:950187]
+\end{lstlisting}
+
+其中方括号里的数字为数据的偏移量,例如[0:490021]表示前一段的模糊哈希值是由第0字节到第490021字节的网络流量所生成的,0是流式模糊哈希值的左偏移量,490021是流式模糊哈希值的右偏移量。
+
+% 上文提到了,流式模糊哈希三个优点:
+
+% 1)可以生成流式数据的摘要,用于网络流量的相似性检测。
+
+% 2)可以在内存集约的环境下工作。
+
+% 3)只要对流量处理一次就可以生成模糊哈希值。
+
+% 以上的三个优点使得流式模糊哈希可以在代理服务器中使用,以获取真实环境中的音视频流量摘要。但由于用户的观看习惯和大量的音视频网站采用流媒体技术,所以导致了缺损的不完整的音视频文件。对于完整度较低文件的相似性比较,流式模糊哈希有着一定的局限性。
+
+% 下文将在真实环境中的代理服务器中部署流式模糊哈希算法,并计算音视频流量的摘要,以获取真实环境的数据,来帮助判断网络流中传输的音视频文件是否重复。
+
+\section{使用基于流式模糊哈希的相似性查找系统的原因及优点}
+采用流式模糊哈希值作为基准对文件的相似性进行检测时,需要使用基于流式模糊哈希的相似性查找系统。
+
+基于流式模糊哈希的相似性查找系统采用编辑距离对流式模糊哈希值进行相似性检测,若两个流式模糊哈希值的编辑距离越小,则认为越相似。编辑距离,又称Levenshtein距离,是指两个字串之间,由一个转成另一个所需的最少编辑操作次数。许可的编辑操作包括将一个字符替换成另一个字符,插入一个字符,删除一个字符。
+
+编辑距离计算的时间复杂度为O($n^2$),为了减少大规模相似查找的时间,大部分的字符串查找系统采用n-gram构建索引。在\cite{Chen2008Efficient}中,作者使用n-gram作为索引,以减少候选字符串数量。n-gram是来自给定字符串的n个字符的连续序列。 为了生成n-gram,它每次从第一个字符向前移动一个字符。 例如,如果n为3,则字符串“abcde”经过处理变为“abc”,“bcd”和“cde”。 每个gram的长度可以是任何常数。
+
+通过n-gram建立索引的一个问题是随着字符串数量的增长,一个gram的链接列表可能太长。基于流式模糊哈希的相似性查找系统通过剪枝被用来解决这个问题。在进行相似查询时,该系统将查询的模糊哈希的字符串分为几个块。为了在查询时进行加速,使用n-chunk代替n-gram。 n-chunk也是来自给定字符串的n个字符的连续序列。但是n-chunk每次从第一个字符向前移动n步。以字符串为例,如果n为3,则字符串“abcdef”经过处理变为“abc”和“def”。
+% \begin{figure}[!htbp]
+% \centering
+% %trim option's parameter order: left bottom right top
+% \includegraphics[width=0.80\textwidth]{Schematicdiagram}
+% \bicaption{基于n-gram的标识匹配算法结构示意图}{Schematic diagram of n-gram-based identification matching algorithm}
+% \label{fig:Schematicdiagram}
+% \end{figure}
+
+% 然后将这些流式模糊哈希生成的gram插入哈希表中。通过n-gram建立索引的一个问题是随着字符串数量的增长,一个gram的链接列表可能太长。该查找系统通过剪枝被用来解决这个问题。
+
+% 在进行相似查询时,系统将查询的模糊哈希的字符串分为几个块。为了在查询时进行加速,使用n-chunk代替n-gram。因为字符串的块数小于gram数。 n-chunk也是来自给定字符串的n个字符的连续序列。但是n-chunk每次从第一个角色向前移动n步。以字符串为例,如果n为3,则字符串“abcdef”将溢出为“abc”和“def”。
+
+在此之后,将块放入哈希表中以搜索是否存在相同的块。最后评价相似度的时候使用如下公式~\ref{eq:cfds}~:
+
+\begin{equation} \label{eq:cfds}
+cfds = 10*\left(\frac{C}{chunk\_cnt}\right)\left(\frac{gram\_cnt}{left\_gram\_cnt}\right)
+\end{equation}
+
+其中C表示出现次数,chunk\_cnt表示查询的string中包含chunk的个数,gram\_cnt表示查询串的gram的个数,left\_gram\_cnt表示被比较的字符串gram个数。
+
+该系统有以下优点:
+
+1)专门针对流式模糊哈希值相似性查找进行设计。
+
+2)优化了字符串匹配的查找算法,用n-chunk代替n-gram构建索引,其对800万条的查询速度可以达到2万次/秒。
+
+\section{小结}
+本章主要介绍了两个前人的工作,分别是流式模糊哈希算法和基于流式模糊哈希算法的相似性查找系统,流式模糊哈希主要是在真实环境中生成音视频流量的摘要,作为基准用以生成文件编号,这样就无需对整个网络流量中传出的音视频文件进行存储,极大的节约了存储的消耗。基于流式模糊的相似性查找系统主要用于计算两个流式模糊哈希值之间的编辑距离,评价两个文件之间的相似性。
+% 其优化了字符串匹配的查找算法,其对800万条的查询速度可以达到2万次/秒。
+
+
diff --git a/Tex/Chap_3.tex b/Tex/Chap_3.tex
new file mode 100644
index 0000000..ca37a3d
--- /dev/null
+++ b/Tex/Chap_3.tex
@@ -0,0 +1,310 @@
+\chapter{预测性文件标识生成方法}\label{chap:introduction}
+
+本章首先简要介绍相应的背景知识,接着使用传统的机器学习方法预测文件的重复性,然后详细说明最终采用的生成预测性文件标识方案。
+
+\section{背景知识}
+
+\subsection{决策树}
+
+决策树是一种类似于流程图的树结构,其中每一个非树叶节点可以认为其是数据集中的一个属性上的测试,决策树的每一个分支代表一个测试的结果,每个树的树叶节点存放着一个类的标号,树的最高的顶层节点是根节点,一个典型的决策树的模型如图~\ref{fig:Typicaldecision}~所示。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{Typicaldecision}
+ \bicaption{典型的决策树结构图}{Typical decision tree}
+ \label{fig:Typicaldecision}
+\end{figure}
+
+决策树的构建不需要任何领域的知识和参数设定,所以非常适合用于探索式的知识发现。
+% J.Ross Quinlan等人开发了ID3决策树算法,此项工作扩展了E.B Hunt等人的工作。而Quinlan等人后来提出了C4.5决策树。
+所有的决策树都采用了类似于贪心算法的思想,绝大多数的决策树分类算法都采用了自上而下的递归算法去构建整个树的结构。随着决策树不断地构建,用于训练决策树的训练集会不断地减少,下面介绍一下决策树的归纳算法。
+
+用三个参数D,attribute list和attribute selection method对决策树进行训练。参数D是数据分区。一开始,其是已经标注过的数据集的全集,参数attribute list用于标识整个标注数据集的属性列表,attribute selection method参数用于标识整个标注数据集属性的启发过程,该过程的思想是贪心算法,用属性选择度量最好的属性进行分类。属性选择的度量方法有很多种,如信息增益和基尼系数等。属性选择的度量方法会导致决策树的树形结构有所不同,比如使用基尼系数其生成的树形结构一定是二叉树。
+
+树的生成从节点N开始,N也是标注数据集D中的一个训练数据集,如果训练集D中的数据其所有的属性是同一类,则节点N变成该决策树的叶子节点,并用其所标注的属性来标记它。如果训练集D中的数据其所有的属性不是同一类,则调用attribute selection method方法,对训练集D进行分裂属性,选择度量会告诉我们如何对训练集进行划分。
+
+常见的分裂准则有信息增益、基尼系数等,例如经典的ID3决策树就使用了信息增益作为属性选择度量。
+
+\subsection{贝叶斯分类方法}
+
+贝叶斯分类方法是一种统计学分类方法。它可以预测类隶属关系的概率,如给定的元组属于一个特定类的概率。
+
+对于给定的$X$的属性描述,找出元组$X$属于类$C$的概率。$P(H|X)$是后验概率,相反的,$P(H)$为先验概率,或者称之为$H$的先验概率。类似的,$P(X|H)$是条件$H$下,$X$的后验概率。
+
+贝叶斯定理提供了一种由$P(X)$、$P(H)$和$P(X|H)$计算后验概率$P(H|X)$的方法贝叶斯定理见公式~\ref{eq:phx}~:
+
+\begin{equation} \label{eq:phx}
+P(H|X) = \left(\frac{P(X|H)P(H)}{P(X)}\right)
+\end{equation}
+
+用向量$X={x_n}$标识每个元组的n维属性
+朴素贝叶斯分类的流程如下:
+
+假设有一个$m$个类的分类问题,其中的类别分别用$C_1$,$C_2$,…,$C_m$表示,给定元组$X$,朴素贝叶斯分类方法将预测$X$属于具有最高后验概率的类(在条件$X$下)。换句话说,朴素贝叶斯分类方法预测$X$属于类$C_i$,当且仅当$P(C_i|X)>P(C_j|X)$,然后根据上面提到的贝叶斯定理最大化$P(C_i|X)$。
+
+由于$P(X)$对于所有的类来说是一个常数,所以仅仅需要$P(X|C_i)P(C_i)$最大即可。若类的先验概率未知,则通常假设这些类都是等概率的。也就是说$P(C_1)=P(C_2)= … =P(C_m)$,并以此为依据对$P(X|C_i)$做最大化估计。由于给定了许多属性的数据集,想要计算$P(X|C_i)$的值,需要消耗大量的时间,为了能够降低$P(X|C_i)$计算时所消耗的资源,遂对计算进行简化。可以用类条件独立的朴素假定,将$P(X|C_i)$的计算简化成公式~\ref{eq:pxc}~:
+
+\begin{equation} \label{eq:pxc}
+P(X|C_i) = \prod_{k=1}^{n}P\left(x_k|C_i\right)
+\end{equation}
+
+\subsection{URL}
+URL是统一资源定位符,通俗的理解就是互联网上的邮编地址,可以在全世界范围内对你所访问的资源进行唯一的标记。URL可以从一个特定的服务器来获取资源。大多数URL会遵循一种标准,这种标准包含以下三个方面:URL的第一个部分是指所访问的资源类型所使用的协议,目前大多数的网站使用的是HTTP协议,或者由HTTP+SSL组成的HTTPS。第二个部分是网址,如www.baidu.com。剩下的部分则指向了服务器上的所要访问的资源。随着交互式网页应用的网页开发技术Ajax等技术的兴起,越来越多的网站采用动态URL技术,使得互联网上的同一资源,往往有几个不同的URL。
+% 这也使得对于URL很多情况下无法对同一资源进行唯一标识。
+\subsection{HTTP缓存}
+% 接着介绍一下HTTP的缓存,
+不同的用户在访问互联网上的同一资源时,服务器会多次传输同一份文件,每次通过网络传输给用户,会消耗大量的网络资源,也会造成整个互联网数据的拥塞。所以HTTP协议采用了许多与网络缓存相关的技术。让用户缓存一个服务器资源的副本,这样就可以节约许多带宽资源。
+
+HTTP缓存有以下优点:1)减少了网络流量的冗余传输,节约了大量的网络带宽。2)加快了用户的访问速度,可以很快的加载已经缓存的页面。3)减少了原始服务器的载荷,使得服务器可以更快地响应用户的请求,也可以避免服务器过载现象的出现。4)降低了整个网络的时延,可以较快地访问较远的资源。
+
+然而,原始服务器中的资源可能随着时间的推移发生改变,这里就提到了一个再验证的概念,其表示用户会不断的对其缓存的副本是否过时进行检测。为了有效的进行副本的再验证,HTTP也对其定义了一些特殊的请求。其中HTTP的条件方法可以高效地进行再验证。HTTP允许缓存向原始服务器发送一个条件GET请求,当原始服务器发现其缓存的副本已经过时,才会向请求端返回一个对象主体。HTTP一共定义了5个条件请求部首,其中和缓存相关的有两条,分别是If-Modified-Since和If-None-Match。其中If-Modified-Since对应的标签是LastModified,而If-None-Match对应的是Etag标签,当原始服务器端的资源发生变化时,服务器会返回一个新的资源和该资源的相应的新的Etag标签和一个LastModified标签。
+
+如图~\ref{fig:Flowchartofrequest_fi}~为用户发送第一次请求的流程图,如图~\ref{fig:Flowchartofrequest_ag}~为用户再次发送请求。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{Flowchartofrequest_fi}
+ \bicaption{用户发出第一次请求}{Flowchart of the first request from the user}
+ \label{fig:Flowchartofrequest_fi}
+\end{figure}
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{Flowchartofrequest_ag}
+ \bicaption{用户再次请求}{Flowchart of the second request from the user}
+ \label{fig:Flowchartofrequest_ag}
+\end{figure}
+
+\subsection{特征选择}
+特征选择是一种的数据预处理过程,对于许多高维度空间的样本来说,用大量的特征属性构建机器学习模型会消耗许多资源,所以需要通过特征选择,删除或者合并冗余的特征,选取出尽可能小的特征子集。整个特征选择的框架如图~\ref{fig:Frameworkforfeature}~所示。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{Frameworkforfeature}
+ \bicaption{特征选择的框架}{Framework of feature selection}
+ \label{fig:Frameworkforfeature}
+\end{figure}
+
+特征选择分为基于搜索策略划分的特征选择方法和基于评价准则划分的特征选择方法。基于评价准则划分的特征选择方法分为过滤式的特征选择方法和包裹式的特征选择方法,其主要的区别在于是否和后续的机器学习算法有关。过滤式的特征选择算法与后续的机器学习算法无关。
+
+过滤式的特征选择算法一般通过一些相关统计量对特征的重要性进行度量,主要采用的的度量标准分为以下几类:距离度量、信息度量、一致性度量和依赖性度量,本文主要采用的度量标准是信息度量和依赖性度量。
+
+包裹式的特征选择方法将学习器的最终结果作为评价的重要指标,使整个样本集针对学习器的不同进行更有个性的优化,达到特征选择的目的。
+
+\subsection{信息熵}
+1948年著名的学者香农第一次提出信息熵的概念,以解决对不同信息的量化度量问题。香农认为任何信息都会存在不同程度的冗余,只是大小问题。一个信息源的冗余取决于其每个符号(数字、单词或者字母)的不确定性。一般来说,对于一个信号源,其出现不同的符号概率越大,则其熵越大。
+
+下面给出信息熵的公式~\ref{eq:H(U)}~:
+
+\begin{equation} \label{eq:H(U)}
+H(U) = -\sum_{i=1}^{n}p_i\log_{10}p_i
+\end{equation}
+
+其中$U$表示信息源,$p_i$表示信息源中每一个符号出现的概率。从机器学习的角度说,一个属性的信息熵反映了该属性所包含的信息量的大小。信息熵越大则说明其包含的信息越多。
+
+\subsection{互信息}
+两个随机变量之间的互信息表示两个变量之间依赖关系的强弱,两个属性之间的互信息也是过滤式的特征选择算法的重要评价标准之一。
+
+与其它的相关系数不同,互信息并不仅仅局限于实值随机变量,它的值主要由联合分布 $p(X,Y)$ 以及边缘分布的乘积 $p(X)p(Y)$所决定。互信息度量了两个事件之间的相关性。
+
+两个离散型随机变量 $X$ 和$Y$的互信息可以定义为如下:
+
+\begin{equation} \label{eq:I(X;Y)1}
+I(X;Y) = \sum_{y\in Y}\sum_{x\in X}p(x,y)\log_{10}\left(\frac{p(x,y)}{p(x)p(y)}\right)
+\end{equation}
+
+其中p(x,y)表示$X$和$Y$的联合概率分布函数,$p(x)$和$p(y)$分别是 $X$ 和 $Y$ 的边缘概率分布函数。
+
+在两个随机变量是连续型随机变量的情形下,计算互信息的公式被替换成了二重定积分:
+
+\begin{equation} \label{eq:I(X;Y)2}
+I(X;Y) = \int_{y}\int_{x}p(x,y)\log_{10}\left(\frac{p(x,y)}{p(x)p(y)}\right)dxdy
+\end{equation}
+
+其中$p(x,y)$表示$X$和$Y$的联合概率密度分布函数,$p(x)$和$p(y)$分别表示$X$和$Y$的边缘概率密度分布函数。
+
+根据互信息的定义可以看出,互信息表示了两个随机变量$X$和$Y$所共享的信息。例如,如果两个随机变量$X$和$Y$相互独立,则认为随机变量$X$不对$Y$提供任何信息,反之亦然,所以它们之间的互信息的值为零。以上的情况,在特征选择中,属性$X$和属性$Y$都将会被保留。如果随机变量$X$是$Y$的一个线性的函数关系,同时随机变量$Y$也是$X$的一个线性函数,那么$X$和$Y$传递的所有信息被$X$和$Y$共享,可认为随机变量$X$可以决定$Y$的值,反之亦然。这种情况下随机变量$X$和$Y$的信息熵也是相同的。以上这种情况认为随机变量相互依赖。在特征选择中,会去除属性$X$,或者去除属性$Y$。
+
+\section{重复音视频文件预测实验}
+
+\subsection{实验思路}
+使用传统的机器学习方法,对真实环境的音视频文件进行了重复性预测,以此来验证机器学习方法的效果。
+
+\subsection{机器学习算法选择}
+采用朴素贝叶斯和决策树,其原因主要有以下三点:
+
+一是所获取的特征大多数为标称量,很难对其进行量化,且标称量之间的加减没有数学意义。决策树和朴素贝叶斯无需对特征进行数学计算,符合特征主要为标称量的条件。
+
+二是朴素贝叶斯和决策树其模型构建完成后,运算量较小,空间复杂度和时间复杂度符合真实环境对算法高效性的需求。深度学习方法虽然准确度较高,但其需要消耗大量资源,不符合真实环境的需求。
+
+三是决策树的分类器构建不需要任何领域的知识和参数设定,所以非常适合用于探索式的知识发现。
+
+\subsection{实验数据}
+数据集是从国内某公司网关收集的3天真实网络流量,共约450万条音视频数据,整个实验数据的文件大小分布如图~\ref{fig:Allaudio}~。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{Allaudio}
+ \bicaption{所有音视频大小分布}{Files size distribution}
+ \label{fig:Allaudio}
+\end{figure}
+
+数据集一共包含15个特征,分别包含了URL,ServerIP,LastModified,Etag,MediaLen,MediaType,音视频文件头部内容1K一直到32K(已经通过MD5进行哈希)。具体的特征如表~\ref{tab:DatasetFeatures}~所示:
+\begin{table}[!htbp]
+ \bicaption{数据集特征}{Dataset features}
+ \label{tab:DatasetFeatures}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{ll}
+ \hline
+ 特征&描述\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ URL&统一资源定位符\\
+ ServerIP&服务器IP\\
+ LastModified& LastModified标识\\
+ Etag&Etag标识\\
+ MediaLen&音视频文件的长度\\
+ MediaType&音视频文件的格式\\
+ 1K、2K、4K…..32K(6个)&音视频文件头部nK的MD5值\\
+ SFH&流式模糊哈希值\\
+ SFH\_len&流式模糊哈希值长度\\
+ \hline
+ \end{tabular}
+\end{table}
+
+上章提到,由于人们的观看习惯,例如拖动在线视频的进度条,或者手动关闭视频网页,导致网络中传输的音视频文件常常会出现缺损。对于缺损严重的文件模糊哈希值也无法判断其是否相似。本文用完整度来表示一个文件的缺损程度,完整度100\%意味着文件无缺损。完整度=所有流式模糊哈希的右偏移减去左偏移量/实际总文件长度(流式模糊哈希的右偏移量和左偏移量定义见上章2.1)。
+
+由于对于完整度较低的文件,模糊哈希值也无法判断其是否相似,因此实验数据选取完整度80\%以上的文件,共297万条,更高的文件完整度代表该流量将经过更加彻底的分析,整个实验数据的完整度80\%以上文件分布如图~\ref{fig:80sizedistribution}~:
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{80sizedistribution}
+ \bicaption{完整度80\%音视频大小分布}{Files size distribution(file transfer completeness 80\%)}
+ \label{fig:80sizedistribution}
+\end{figure}
+
+\subsection{数据标注}
+将所有音视频文件的哈希值存储到相似性查找系统中,若该哈希值在系统中找到与该流量相似的摘要,则认为其重复,若在系统中找不到与该流量相似的摘要,则认为其不重复。
+
+\subsection{机器学习库}
+采用的机器学习库是基于python的sklearn,选用sklearn主要有以下原因:
+1)sklearn是一种简单有效的机器学习工具,支持大部分的机器学习算法,减少了使用者对算法库的学习成本。2)sklearn可在各种环境中重复使用,对硬件要求较低。3)sklearn是基于NumPy,SciPy和matplotlib构建的,NumPy等是Python语言的扩展程序库,支持大量的维度数组与矩阵运算。因此使用sklearn可以减少使用者对高维度的数据集进行处理的成本。
+
+\subsection{特征量化}
+选择的特征和其特征量化方式见表~\ref{tab:Selectedfeatures}~:
+\begin{table}[!htbp]
+ \bicaption{所选特征和其特征量化方式}{Selected features}
+ \label{tab:Selectedfeatures}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{ll}
+ \hline
+ HTTP头部信息&量化方式\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ URL&n-gram\\
+ ServerIP&按8位分割\\
+ MediaType&保留原格式\\
+ MediaLen&保留原格式\\
+ Etag&CRC32\\
+ Last-Modified&CRC32\\
+ \hline
+ \end{tabular}
+\end{table}
+
+根据\cite{Sahoo2017Malicious}的论文,通过n-gram的方式对URL提取特征,大量应用于恶意URL分析领域。因此,本文对于URL的特征提取也使用n-gram,并分别尝试了n=3,4,5,6,7的情况。
+
+根据\cite{Tan2018},考虑到观看视频的ServerIP地址和地域之间可能存在的关系,ServerIP地址采用了8位分割。
+
+Etag和Last-Modified采用了CRC32进行哈希,作为一个标量信息处理。之所以采用CRC32是因为使用的机器学习库sklearn最多支持32位的标称量。
+
+\subsection{实验结果}
+采用了交叉验证,随机选取80\%的数据作为训练集,剩下的20\%的数据作为测试集。通过决策树和朴素贝叶斯对音视频文件的重复性进行了预测。
+
+最终的结果为决策树预测是否重复的准确率为68\%,召回率为55\%。朴素贝叶斯的准确率为16\%,召回率为18\%。其主要原因是,音视频流量报文头部的特征较少,与缓存相关的特征大多数为标称量,很难提供较为有用的信息,且由于在内存集约的环境下工作,无法对音视频文件头部进行编解码从而获取编解码后视频的帧信息,使得有用的特征大大减少。
+
+最终采用通过生成音视频文件标识的方式,对文件的重复性进行检测。
+
+\section{预测性文件标识生成}
+通过重复音视频文件预测实验,对使用机器学习方法来预测网络流量中的音视频文件是否重复进行了探索,其结果证明,决策树和朴素贝叶斯等传统的机器学习方法其预测结果的准确率和召回率不理想。需要一种新的方法对在网络流量中的重复音视频文件进行预测。
+
+最终决定使用文件标识的方式对重复的音视频文件进行检测。本段将介绍预测性文件标识的生成,在下章将详细介绍基于流式模糊哈希的重复音视频检测方法。
+
+\subsection{步骤}
+在音视频流量刚刚建立传输开始时,选取音视频流量头部的如Etag,Last-Modified等和缓存相关的一些特征,以及文件头部的一些信息,最后通过MD5等哈希算法生成一个具有预测性的文件标识。
+
+\subsection{特征选择评价标准}
+音视频流量复杂,每种音视频流量其特征也不完全相同。由于一些原始服务器并不支持HTTP缓存,有些音视频流量并不含有Etag和LastModified等特征,这对音视频流量的特征选取增加了许多挑战,那么如何建立一套行之有效的音视频流量特征评价体系,量化的评价特征的效果?
+
+本文采用过滤式的特征选择方式,其原因主要有以下两点:
+
+一是包裹式选择方式需要训练学习器,使用机器学习方法,但根据上文的实验,传统的机器学习方法无法满足真实环境要求,所以无法使用包裹式的选择方法。
+
+二是选取的音视频流量头部特征多为标称量,过滤式的选择方式其许多评价标准可以适用于对标称量的评价,符合使用环境。
+
+采用了两个度量指标,一是信息熵。信息熵表示一个属性的随机性,信息熵越大表明该属性随机性越强,包含的信息越多。二是互信息。互信息表示两个属性之间的依赖关系的强弱,使用互信息的目的是降低属性的维度,减少获取的属性的数量,选取的属性其之间的互信息越小越好。
+
+\subsection{实验数据}
+与重复音视频文件预测实验数据集相同。
+
+\subsection{音视频特征的信息熵}
+信息熵计算实验结果如表~\ref{tab:CharacteristicInformation}~所示:
+\begin{table}[!htbp]
+ \bicaption{特征信息熵结果}{Features information entropy results}
+ \label{tab:CharacteristicInformation}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{ll|ll}
+ \hline
+ 特征&信息熵& 特征&信息熵\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ 音视频文件头部内容32K&14.47&Etag&14.19\\
+ 音视频文件头部内容16K&14.42&Last-Modified&13.69\\
+ 音视频文件头部内容8K&14.38&MediaLen&12.17\\
+ 音视频文件头部内容4K&14.36&URL&10.57\\
+ 音视频文件头部内容2K&14.34&ServerIP&7.17\\
+ 音视频文件头部内容1K&14.32&MediaType&1.9\\
+ \hline
+ \end{tabular}
+\end{table}
+
+根据实验结果,文件头部内容的信息熵要高于音视频流量报文特征的信息熵。URL这一特征其熵之所以非常低,是因为由于捕获环境的单向流影响,大量的音视频流量是源服务器的应答流量无法获取URL,大部分的URL特征为None,导致URL的熵较低。MediaType的信息熵很低为1.9,因此在生成文件标识时,不使用此特征。最终通过信息熵的结果,筛选出生成文件标识的候选特征为:音视频文件头部内容32K、Etag、Last-Modified、MediaLen、URL、ServerIP。
+
+\subsection{音视频特征的互信息}
+互信息的计算较为复杂,随着数据集规模的增加,其呈指数型增长。因此从原始的数据集中随机抽取了50万条音视频数据。
+
+参与互信息计算的特征只有Etag、Last-Modified、MediaLen、URL和ServerIP一共5个特征。其原因如下:1) 以上5个特征其信息熵较高,可见其含有的信息量较大。2) 对于不同长度的音视频文件内容头部,根据常识,其有较强的相关性。所以只选取了信息熵最大的一个。
+
+互信息的计算结果如表~\ref{tab:Featuremutual}~所示:
+\begin{table}[!htbp]
+ \bicaption{特征互信息结果}{Features mutual information result}
+ \label{tab:Featuremutual}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{ll|ll}
+ \hline
+ 特征&互信息&特征&互信息\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ Etag,Last-Modified&0.94&Last-Modified,URL&0.24\\
+ Etag,MediaLen&0.42&Last-Modified,ServerIP&0.35\\
+ Etag,URL&0.25&MediaLen,URL&0.37\\
+ Etag,ServerIP&0.34&MediaLen,ServerIP&0.39\\
+ Last-Modified,MediaLen&0.42&URL,ServerIP&0.23\\
+ \hline
+ \end{tabular}
+\end{table}
+
+根据互信息的实验结果,Etag和Last-Modified之间相关性较强,其互信息为0.94,选择其中一个作为候选特征即可。一共选取了Etag、MediaLen、URL、ServerIP四个特征作为计算音视频文件标识的基础,并改变选取的音视频文件内容长度nk生成6个不同的文件标识。在下章将对这些文件标识的效果进行评估,最后选取一个合适的文件标识。
+\section{小结}
+本章首先介绍了决策树、朴素贝叶斯、和HTTP缓存等相关的背景知识。接着介绍了重复音视频文件预测实验,在实验中对使用机器学习方法来预测网络流量中的音视频文件是否重复进行了探索,最终的结果为决策树和朴素贝叶斯等传统的机器学习方法其预测结果的准确率和召回率不理想。最后介绍了预测性文件标识,选取音视频流量头部的一些特征生成一个具有预测性的文件标识,并通过熵和互信息对生成标识的特征进行筛选,最后生成了6个不同的文件标识,在下章我们将评估这些文件标识的效果。
diff --git a/Tex/Chap_4.tex b/Tex/Chap_4.tex
new file mode 100644
index 0000000..d87be3c
--- /dev/null
+++ b/Tex/Chap_4.tex
@@ -0,0 +1,145 @@
+\chapter{基于流式模糊哈希的重复音视频检测方法}\label{chap:introduction}
+
+本章首先介绍相关背景知识,接着介绍了基于流式模糊哈希的重复音视频检测方法和如何生成文件编号,然后详细介绍评价标识所采用的标准,最后通过实验证明了预测性音视频文件标识的效果。
+
+\section{背景知识}
+
+% \subsection{网络路由}
+% % 在介绍单项流之前先介绍什么是网络路由,
+% 网络路由是指分组从源到目的地时,决定端到端路径的网络范围的进程。路由工作在OSI参考模型第三层——网络层的数据包转发设备。路由器通过转发数据包来实现网络互连。路由器通常连接两个或多个由IP子网或点到点协议标识的逻辑端口,至少拥有1个物理端口。路由器根据收到数据包中的网络层地址以及路由器内部维护的路由表决定输出端口以及下一跳地址,并且重写链路层数据包头实现转发数据包。路由器通过动态维护路由表来反映当前的网络拓扑,并通过网络上其他路由器交换路由和链路信息来维护路由表。如图\ref{fig:route}为典型的Windows路由表
+% \begin{figure}[!htbp]
+% \centering
+% %trim option's parameter order: left bottom right top
+% \includegraphics[width=0.60\textwidth]{route}
+% \bicaption{路由表}{route table}
+% \label{fig:route}
+% \end{figure}
+\subsection{单向流}
+路由的不对称性在整个网络中是普遍存在的。对于每个网络上的节点,会根据当前网络的情况和目的IP独立地选择不同的路由路径,但由于多条等价路由或者负载均衡等因素的影响,路由的不对称性是一种无法避免的现象。如果两个端点之间传输的数据包其上行和下行之间的物理链路是相同的(即经过网络转发设备顺序是逆序相等的),则它们是路由对称的。否则称为不对称路由。在物理链路上只能观察到上行或者下行数据包的网络流,称之为单向流。单向流是不对称流量的一种,通常对单向流下行数据包的处理无法获取其访问的URL等信息。
+
+\subsection{流式模糊哈希相似不具有传递性}
+% 上文提到了流式模糊哈希采用存储中间计算结果的方法进行流式数据摘要计算,能够处理数据缺损、乱序和重叠的情况,并且采用伽罗华域(Galois Field)的乘法运算作为强哈希算法减少内存占用,使得本算法能够在内存集约的情况下实时计算流式数据摘要。但流式模糊哈希值有一定的缺陷性。
+由于网络中音视频流量的不完整传输,流式模糊哈希值计算的音视频文件的完整度往往较低,无法对完整度较低的文件进行彻底的分析。因此流式模糊哈希值之间的相似不具有传递性。举个例子,H1,H2,H3分别是同一个音视频文件的模糊哈希值,但由于用户的观看习惯,它们分别对应的完整度不同,分别是80\%,100\%和80\%。H1和H2相似,H2和H3相似,但H1和H3不相似。其原因是H1和H3的两个音视频文件完整度较低,导致其编辑距离较低。该特性对下面介绍的文件编号的计算和漏报误报的统计产生了巨大的影响。
+
+\section{基于流式模糊哈希的重复音视频检测方法}
+\subsection{步骤}
+% 上文提到了预测性文件标识的生成,那如何对重复音视频进行检测呢?步骤如下,
+在音视频传输刚刚建立时,生成音视频文件标识,若在数据集中查询到该文件标识,则认为该音视频文件重复,并告知内容分析模块无需继续进行内容审计,若未能在数据集中查询到该标识,则认为该文件不重复,数据库记录该音视频文件的标识和内容分析模块的审计结果。
+
+在查询文件标识的同时,该方法也会计算文件的流式模糊哈希值,用以生成文件编号,文件编号是文件标识评价的关键参数之一。文件标识评价采用两个指标,分别为漏报率和误报率。文件编号、误报率和漏报率的计算将在下文详细说明。
+
+整个思路的具体流程如图~\ref{fig:Flowchartofpredictiveprogram}~所示。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{Flowchartofpredictiveprogram}
+ \bicaption{基于流式模糊哈希的重复音视频检测方法流程图}{Flow chart of Repeated multimedia detection method}
+ \label{fig:Flowchartofpredictiveprogram}
+\end{figure}
+
+\subsection{音视频文件编号}
+% 在介绍整个预测性文件标识的评价体系前,先要介绍一下文件编号的生成,
+文件编号是一个基准值,对于同一个文件其文件编号相同,文件编号由流式模糊哈希值生成。
+
+文件编号计算的思路如下:主要是通过流式模糊哈希,将相似的音视频文件赋予相同的文件编号,前文提到过流式模糊哈希的相似不具有传递性,因此需要选出一个合适的流式模糊哈希值作为基准。例如在上文提到的例子若以H1和H3作为基准模糊哈希值,则会出现两个不同的音视频文件编号。若以H2作为基准模糊哈希值,则只出现唯一的音视频文件编号。因此,需要找到一个基准的流式模糊哈希值,再对文件进行编号。
+
+基准的哈希值应为完整度最高的音视频流量所生成的哈希值。若完整度最高的基准流量摘要存在多条,则选取与其他流量相似度之和最高的哈希值。
+
+下面是计算文件编号的具体步骤如下:
+
+步骤一:将该段时间产生的所有音视频流量的模糊哈希值存入基于流式模糊哈希的相似性查找系统,n=1。
+
+步骤二:获取第n条哈希值,若发现其已经有音视频标识则n=n+1,再次执行步骤二,否则执行步骤三。
+
+步骤三:将第n条哈希值在系统中查询,获取所有相似的音视频,并在这些相似的音视频中获取基准流式模糊哈希值,再次查询该基准流式模糊哈希值,其结果打上音视频标识n。若为最后一条执行结束计算,否则n=n+1,执行步骤二。
+
+如图~\ref{fig:numberflow}~为整个音视频文件编号的流程图。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.60\textwidth]{numberflow}
+ \bicaption{音视频文件编号流程图}{multimedia file number generate}
+ \label{fig:numberflow}
+\end{figure}
+
+\subsection{预测性文件标识评价}
+
+本文采用基于流式模糊哈希值的方法对文件标识进行评价,采用两个评价指标:一是漏报率,漏报率=漏报的音视频文件数/总音视频文件数。这里的漏报是指:预测性文件标识不相同,但文件编号相同。二是误报率,误报率=误报的音视频文件数/总音视频文件数。这里的误报是指:预测性文件标识相同,但文件编号不同。由于代理服务器等网络中间件有着色情检测、版权保护的作用,因此对于重复音视频文件检测系统来说,其误报率要比漏报率重要。
+
+\section{重复音视频检测方法的可行性验证}
+
+使用上文提到的数据集,使用完整度80\%以上的数据对第3章最后生成的6个文件标识的效果进行了验证,计算它们的误报率和漏报率。
+
+\subsection{预测性文件标识的误报率}
+
+误报的定义是预测性文件标识相同,但文件编号不同,其结果如表~\ref{tab:Resultsoffalse}~所示。
+\begin{table}[!htbp]
+ \bicaption{误报率结果}{Results of false positive rate}
+ \label{tab:Resultsoffalse}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{lccccccc}
+ \hline
+ 音视频文件头部nK生成的标识&0K&1K&2K&4K&8K&16K&32K\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ 误报率&16.8\%&7.4\%&3.2\%&1.4\%&0.8\%&0.39\%&0.34\%\\
+ \hline
+ \end{tabular}
+\end{table}
+
+当生成标识的音视频文件头部内容增加到32K,文件标识的误报率降低到0.34\%,其检测效果远远优于传统的机器学习方法。随着选取的音视频文件头部的长度越来越长,其误报率呈下降趋势。这样的结果也验证了随着熵的增高,其特征的效果越好的设想,因此信息熵是一种较好的评价文件标识的标准。
+
+\subsection{预测性文件标识的漏报率}
+漏报的定义是预测性文件标识不相同,但文件编号相同,其结果如表~\ref{tab:ResultsofMissing}~:
+
+\begin{table}[!htbp]
+ \bicaption{漏报率结果}{Results of missing rate}
+ \label{tab:ResultsofMissing}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{lccccccc}
+ \hline
+ 音视频文件头部nK生成的标识&0K&1K&2K&4K&8K&16K&32K\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ 漏报率&67\%&66\%&65\%&66\%&66\%&65\%&62\%\\
+ \hline
+ \end{tabular}
+\end{table}
+
+随着获取的音视频文件头部的长度越来越长,其漏报率也呈下降趋势,但趋势并不明显,当选取的文件头部内容长度已经达到32K时,但其漏报率依旧高达62\%。这又是什么原因呢?
+
+通过对漏报数据的观察,发现大量的出现漏报的音视频数据其Etag和LastModified标签相同,但其ServerIP地址不同。这可能是因为某些大型网站使用了CDN服务,同一音视频资源在网络环境中存在多个ServerIP不同的镜像。同时目前很多网站的URL是动态生成的,其不同的URL会标记相同的视频资源,文件标识的生成选取URL作为特征也可能是漏报率较高的原因。
+
+本文上章提到过,由于单向流的影响,很多音视频流量无法获取其URL,或者获取URL的开销极大。同时很多网站的URL是动态生成的,如果能够在生成文件标识时,不采用URL作为候选特征,将减少网络中间件单向流对准的开销。
+
+为了解漏报率产生的原因,同时减少网络中间件单向流对准的开销,本文重新生成了三个音视频标识,并计算其漏报率,三个标识分别是:去除ServerIP的文件标识,去除URL的标识,和同时去除URL和ServerIP的标识,三个标识的音视频文件头部长度都为32K。实验结果如表~\ref{tab:PredictiveDocument}~所示:
+\begin{table}[!htbp]
+ \bicaption{漏报率结果}{Missing rate results}
+ \label{tab:PredictiveDocument}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{lcc}
+ \hline
+ 文件标识&误报率&漏报率\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ 去除ServerIP的文件标识&0.32\%&10\% \\
+ 去除URL的文件标识&0.37\%&60\% \\
+ 去除ServerIP和URL的文件标识&0.35\%&7\% \\
+ \hline
+ \end{tabular}
+\end{table}
+
+根据实验结果,去除ServerIP的文件标识比未去除ServerIP的文件标识漏报率降低了约50\%,误报率并未发生明显变化,可见ServerIP作为生成文件标识的特征是漏报率高达60\%的主要原因,生成预测性文件标识时不应该把ServerIP作为候选特征。同时,去除ServerIP和URL的文件标识其误报率和漏报率未发生明显变化,且大大减少了网络中间件单向流对准的开销,因此通过实验得出了最优的文件标识候选特征,分别为Etag、MediaLen、Last-Modified、头部文件32K(其中Etag和Last-Modified任选其一)。
+
+根据上文的音视频特征的信息熵计算结果显示,ServerIP和URL其信息熵低。这也验证了随着熵的增高,其特征的效果越好的设想。
+\section{小结}
+本章首先介绍了单向流相关的背景知识,指出了流式模糊哈希不具有传递性及其对文件编号计算的影响。然后介绍了基于流式模糊哈希的重复音视频检测方法,使用文件标识对音视频文件进行重复性检测的同时计算该音视频的流式模糊哈希值以生成文件编号。接着提出了两个文件标识的评价标准:漏报率和误报率。最后对文件标识的效果进行了验证,结果证明生成音视频文件标识的特征效果最好的是:Etag、MediaLen、Last-Modified、头部文件32K。
diff --git a/Tex/Chap_5.tex b/Tex/Chap_5.tex
new file mode 100644
index 0000000..6ddd2e3
--- /dev/null
+++ b/Tex/Chap_5.tex
@@ -0,0 +1,87 @@
+\chapter{系统设计与实现}\label{chap:introduction}
+
+本章介绍面向网络流量的重复音视频实时检测系统的概述和测试。
+
+\section{系统概述}
+
+系统的输入是在线的网络音视频流量。整个系统由两个模块组成,重复文件实时检测和文件标识评价,整个系统的实现框架图如图~\ref{fig:systemframework}~所示:
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{systemframework}
+ \bicaption{面向网络流量的重复音视频实时检测系统框架图}{multimedia repeated files detection system framework}
+ \label{fig:systemframework}
+\end{figure}
+
+当音视频流量传输时,该系统会生成一个具有预测性的文件标识,并在数据集中进行查询,若该标识在数据集中查询到,则告知内容分析模块文件重复,若在数据集中查询不到该标识,数据集记录新生成的文件标识。该系统在生成标识的同时,抽取10\%的音视频文件,计算其流式模糊哈希值并记录。文件标识评价模块会通过记录下的流式模糊哈希值对文件标识进行评价,优化文件标识的生成。
+
+\section{系统测试}
+
+\subsection{测试原理}
+测试环境是在国内某公司网关,对音视频流量进行分光,流量副本通过整个重复音视频文件实时检测系统,并与原始流量进行对比。测试原理如图~\ref{fig:test}~所示。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{test}
+ \bicaption{系统测试原理}{System test principle}
+ \label{fig:test}
+\end{figure}
+
+\subsection{特定音视频文件召回率测试}
+对内容分析模块返回的特定音视频文件数进行对比并计算召回率,以检测整个重复音视频文件实时检测系统对内容分析模块的影响。其结果如表~\ref{tab:contentanalysis}~:
+\begin{table}[!htbp]
+ \bicaption{内容分析模块特定音视频文件召回率}{Specific multimedia files recall}
+ \label{tab:contentanalysis}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{lc}
+ \hline
+ 完整度&特定音视频文件召回率\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ 80\%&99.4\%\\
+ 95\%&94.7\%\\
+ \hline
+ \end{tabular}
+\end{table}
+
+根据实验结果,经过重复音视频文件实时检测系统的流量内容分析模块特定音视频文件召回率高于94\%,因此,去重机制未影响内容分析模块准确性。
+
+\subsection{系统去重效果测试}
+接着又对整个系统的去重效果进行了测试其结果如下:
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{result_a}
+ \bicaption{完整度80\%文件数去重}{Deduplication rate by file number(file transfer completeness 80\%)}
+ \label{fig:result_a}
+\end{figure}
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{result_b}
+ \bicaption{完整度95\%文件数去重}{Deduplication rate by file number(file transfer completeness 95\%)}
+ \label{fig:result_b}
+\end{figure}
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{result_c}
+ \bicaption{完整度80\%文件带宽去重}{Deduplication rate by file size(file transfer completeness 80\%)}
+ \label{fig:result_c}
+\end{figure}
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[width=0.80\textwidth]{result_d}
+ \bicaption{完整度95\%文件带宽去重}{Deduplication rate by file size(file transfer completeness 95\%)}
+ \label{fig:result_d}
+\end{figure}
+
+如图~\ref{fig:result_a}~所示,完整度80\%的文件数平均的去重率约为65\%,文件数的去重率=发现的重复文件数/总文件数。如图~\ref{fig:result_b}~所示,完整度95\%的文件数平均的去重率约为59\%。如图~\ref{fig:result_c}~所示,完整度80\%的文件的带宽去重率约为22\%,文件带宽的去重率=去重后带宽/未去重带宽。如图~\ref{fig:result_d}~所示,完整度95\%的文件的带宽去重率约为21.4\%。可见系统的去重效果显著,可以减少内容分析机20\%的带宽消耗和对60\%文件的重复分析。
+
+\section{小结}
+本章首先介绍了整个重复音视频文件实时检测系统的概述,并对该系统对特定音视频文件的召回率和去重效果进行了测试。系统的去重机制未影响内容分析模块准确性,且去重效果显著,文件数去重率约为60\%以上,带宽去重率约为20\%。
+
diff --git a/Tex/Chap_6.tex b/Tex/Chap_6.tex
new file mode 100644
index 0000000..d0aaad4
--- /dev/null
+++ b/Tex/Chap_6.tex
@@ -0,0 +1,42 @@
+\chapter{总结与展望}\label{chap:introduction}
+
+前面的章节详细介绍了开展本论文的背景与意义、国内外研究现状、面向网络流量的重复音视频文件实时检测系统的两个关键技术以及整个系统的设计和实现,本章对以上研究内容进行一次全面的总结,对研究过程中没有考虑过的问题进行详细分析,对下一步研究工作做出合理的展望。
+
+\section{全文总结}
+越来越多的人通过网络观看和下载音视频节目,这也使得音视频流量成为网络流量中占比最高的流量。随着音视频流量快速增长,对音视频流量的重复性检测也越来越受到广泛关注。在学术界,有大量的工作研究网络冗余发现,用以减少整个网络环境的资源开销。
+
+面向数据链路层的网络冗余发现系统,面对的挑战是数据链路层网络的不稳定性和IP数据的比例下降的特性。链路层的冗余发现系统,主要部署在蜂窝数据网络和无线局域网。代表性的工作有\cite{Lumezanu2010The}和\cite{Halepovic2012}。
+
+面向网络层的网络冗余发现系统,面对的挑战是检测的准确性和高效性。网络层的冗余发现系统通常是基于端到端的模型,通过不同的网络冗余发现算法对流量的冗余进行检测,其中较为出名的算法是PACK、MODP和拉宾指纹。代表性的工作有\cite{Spring2000}和\cite{Yu}。
+
+面向应用层的网络冗余发现系统,面对的挑战是加密流量的兴起,其主要应对不同的应用场景如移动设备、企业内部网络、云计算等。代表性的工作有\cite{Aggarwal}和\cite{Feng2013How}。
+
+本文的研究目标是针对网络中间件消耗大量资源处理重复音视频文件的问题,设计并实现面向网络流量的重复音视频实时检测系统。该系统有两个要求:一是检测的实时性。二是算法的准确性。
+
+本文的两个工作基础,分别是流式模糊哈希和基于流式模糊哈希的大规模相似性查找系统,流式模糊哈希主要是在真实环境中生成音视频流量的摘要,作为基准用以生成文件编号。而相似性查找系统主要是为了计算两个流式模糊哈希值之间的编辑距离,评价两个文件之间的相似性。
+
+整个重复音视频实时检测系统主要基于的两个技术:
+
+1)预测性文件标识生成方法。通过重复音视频文件预测实验,传统的机器学习方法准确率和召回率结果不理想,不适用重复音视频文件实时检测。预测性文件标识生成方法采用信息熵和互信息作为评价标准,对生成文件标识的特征进行筛选,并在流量建立连接时,生成一个具有预测性的文件标识。
+
+2)基于流式模糊哈希的重复音视频检测方法。若文件标识可在数据库中查询到,则认为该音视频文件是重复的,同时通过流式模糊哈希值生成文件编号,通过两个指标验证文件标识的效果,分别为误报率漏报率。误报率漏报率的计算结果可用于指导文件标识的特征选择。通过实验得出了最优的文件标识候选特征,分别为Etag、MediaLen、Last-Modified、头部文件32K(其中Etag和Last-Modified任选其一)。
+
+基于以上的两个技术,本文实现了重复音视频实时检测系统,该系统去重效果显著,文件数去重率约为60\%以上,带宽去重率约为20\%。经过该系统的流量内容分析模块特定音视频文件召回率高于95\%,未影响内容分析模块准确性。
+
+本文研究的完成以下创新:
+
+1)提出了一种预测性文件标识生成方法,该方法选取音视频流量头部的一些特征,以及文件头部的一些信息,通过熵和互信息对这些特征进行筛选,最后通过哈希算法生成一个具有预测性的文件标识。
+
+2)提出了一种基于流式模糊哈希的重复音视频检测方法,在通过文件标识检测音视频文件重复性的同时,也会计算文件的流式模糊哈希值,用以生成文件编号,指导文件标识的生成。
+
+3)实现了重复音视频实时检测系统,真实网络环境实验结果表明,该系统的去重机制未影响内容分析模块准确性,且去重效果显著。
+
+\section{研究展望}
+
+本文的研究工作存在以下几个方面需要进一步完善:
+
+1)目前系统对于不同的文件标识,设计的生命周期为24小时。但测试中发现许多热点音视频的文件标识的生命周期可以延长到十几天甚至几个月,是否可以针对不同的文件,生成不同的标识生命周期?采用什么方法对文件标识的生命周期进行预测?
+
+2)目前系统的文件标识评价模块,采用半离线的方式,抽取10\%的音视频流量,并计算其流式模糊哈希值,从而指导文件标号的生成。该模块应对概念漂移能力不理想,是否可以通过某些方法,实时地预测文件标识的准确性?
+
+3)基于流式模糊哈希的大规模相似性查找系统在面对海量的流式模糊哈希查找时,耗费的内存高,无法在内存集约的环境下使用。是否可以对该查找系统进行改进,降低其内存消耗。 \ No newline at end of file
diff --git a/Tex/Chap_Guide.tex b/Tex/Chap_Guide.tex
new file mode 100644
index 0000000..73d06f1
--- /dev/null
+++ b/Tex/Chap_Guide.tex
@@ -0,0 +1,342 @@
+\chapter{\LaTeX{}使用说明}\label{chap:guide}
+
+为方便使用及更好地展示\LaTeX{}排版的优秀特性,ucasthesis的框架和文件体系进行了细致地处理,尽可能地对各个功能和板块进行了模块化和封装,对于初学者来说,众多的文件目录也许一开始让人觉得有些无所适从,但阅读完下面的使用说明后,会发现原来使用思路是简单而清晰的,而且,当对\LaTeX{}有一定的认识和了解后,会发现其相对Word类排版系统极具吸引力的优秀特性。所以,如果是初学者,请不要退缩,请稍加尝试和坚持,以领略到\LaTeX{}的非凡魅力,并可以通过阅读相关资料如\LaTeX{} Wikibook \citep{wikibook2014latex} 来完善自己的使用知识。
+
+\section{先试试效果}
+
+\begin{enumerate}
+ \item 安装软件:根据所用操作系统和章节~\ref{sec:system}中的信息安装\LaTeX{}编译环境。
+ \item 获取模板:下载 \href{https://github.com/mohuangrui/ucasthesis}{ucasthesis} 模板并解压。ucasthesis模板不仅提供了相应的类文件,同时也提供了包括参考文献等在内的完成学位论文的一切要素,所以,下载时,推荐下载整个ucasthesis文件夹,而不是单独的文档类。
+ \item 编译模板:
+ \begin{enumerate}
+ \item Windows:双击运行artratex.bat脚本。
+ \item Linux或MacOS: {\scriptsize \verb|terminal| -> \verb|chmod +x ./artratex.sh| -> \verb|./artratex.sh xa|}
+ \item 任意系统:都可使用\LaTeX{}编辑器打开Thesis.tex文件并选择xelatex编译引擎进行编译。
+ \end{enumerate}
+ \item 错误处理:若编译中遇到了问题,请先查看“常见问题”(章节~\ref{sec:qa})。
+\end{enumerate}
+
+编译完成即可获得本PDF说明文档。而这也完成了学习使用ucasthesis撰写论文的一半进程。什么?这就学成一半了,这么简单???,是的,就这么简单!
+
+\section{文档目录简介}
+
+\subsection{Thesis.tex}
+
+Thesis.tex为主文档,其设计和规划了论文的整体框架,通过对其的阅读可以了解整个论文框架的搭建。
+
+\subsection{编译脚本}
+
+\begin{itemize}
+ \item Windows:双击Dos脚本artratex.bat可得全编译后的PDF文档,其存在是为了帮助不了解\LaTeX{}编译过程的初学者跨过编译这第一道坎,请勿通过邮件传播和接收此脚本,以防范Dos脚本的潜在风险。
+ \item Linux或MacOS:在terminal中运行
+ \begin{itemize}
+ \item \verb|./artratex.sh xa|:获得全编译后的PDF文档
+ \item \verb|./artratex.sh x|:快速编译模式
+ \end{itemize}
+ \item 全编译指运行 \verb|xelatex+bibtex+xelatex+xelatex| 以正确生成所有的引用链接,如目录,参考文献及引用等。在写作过程中若无添加新的引用,则可用快速编译,即只运行一遍\LaTeX{}编译引擎以减少编译时间。
+\end{itemize}
+
+\subsection{Tmp文件夹}
+
+运行编译脚本后,编译所生成的文档皆存于Tmp文件夹内,包括编译得到的PDF文档,其存在是为了保持工作空间的整洁,因为好的心情是很重要的。
+
+\subsection{Style文件夹}
+
+包含ucasthesis文档类的定义文件和配置文件,通过对它们的修改可以实现特定的模版设定。若需更新模板,一般只需用新的样式文件替换旧的即可。
+
+\begin{enumerate}
+ \item ucasthesis.cls:文档类定义文件,论文的最核心的格式即通过它来定义的。
+ \item ucasthesis.cfg:文档类配置文件,设定如目录显示为“目~录”而非“目录”。
+ \item artratex.sty: 常用宏包及文档设定,如参考文献样式、文献引用样式、页眉页脚设定等。这些功能具有开关选项,常只需在Thesis.tex中的如下命令中进行启用即可,一般无需修改artratex.sty本身。
+
+ \path{\usepackage[options]{artratex}}
+ \item artracom.sty:自定义命令以及添加宏包的推荐放置位置。
+\end{enumerate}
+
+\subsection{Tex文件夹}
+
+文件夹内为论文的所有实体内容,正常情况下,这也是\textbf{使用ucasthesis撰写学文论文时,主要关注和修改的一个位置,注:所有文件都必须采用UTF-8编码,否则编译后将出现乱码文本},详细分类介绍如下:
+
+\begin{itemize}
+ \item Frontpage.tex:为论文中英文封面及中英文摘要。\textbf{论文封面会根据英文学位名称如Bachelor,Master,或是Doctor自动切换为相应的格式}。
+ \item Mainmatter.tex:索引需要出现的Chapter。开始写论文时,可以只索引当前章节,以快速编译查看,当论文完成后,再对所有章节进行索引即可。
+ \item Chap{\_}xxx.tex:为论文主体的各个章节,可根据需要添加和撰写。
+ \item Appendix.tex:为附录内容
+ \item Backmatter.tex:为发表文章信息和致谢部分等。
+\end{itemize}
+
+\subsection{Img文件夹}
+
+用于放置论文中所需要的图类文件,支持格式有:.jpg, .png, .pdf。其中,\verb|ucas_logo.pdf|为国科大校徽。不建议为各章节图片建子目录,即使图片众多,若命名规则合理,图片查询亦是十分方便。
+
+\subsection{Biblio文件夹}
+
+\begin{enumerate}
+ \item ref.bib:参考文献信息库。
+ \item gbt7714-xxx.bst:文献样式定义文件。由 \href{https://github.com/zepinglee}{zepinglee} 开发,在最新国标的基础上对ucas进行了定制。与文献样式有关的问题,请查阅开发者所提供的文档,并建议适当追踪 \href{https://github.com/CTeX-org/gbt7714-bibtex-style/tree/ucas}{ucas 样式分支}的更新。
+\end{enumerate}
+
+\section{数学公式、图表、参考文献等功能}
+
+\subsection{数学公式}
+
+比如Navier-Stokes方程(方程~\eqref{eq:ns})(times text test: 1,2,3,4,5, times math test: $\mathrm{1,2,3,4,5}, 1,2,3,4,5$):
+\begin{equation} \label{eq:ns}
+ \adddotsbeforeeqnnum%
+ \begin{cases}
+ \frac{\partial \rho}{\partial t} + \nabla\cdot(\rho\Vector{V}) = 0 \ \mathrm{times\ math\ test: 1,2,3,4,5}, 1,2,3,4,5\\
+ \frac{\partial (\rho\Vector{V})}{\partial t} + \nabla\cdot(\rho\Vector{V}\Vector{V}) = \nabla\cdot\Tensor{\sigma} \ \text{times text test: 1,2,3,4,5}\\
+ \frac{\partial (\rho E)}{\partial t} + \nabla\cdot(\rho E\Vector{V}) = \nabla\cdot(k\nabla T) + \nabla\cdot(\Tensor{\sigma}\cdot\Vector{V})
+ \end{cases}
+\end{equation}
+\begin{equation}
+ \adddotsbeforeeqnnum%
+ \frac{\partial }{\partial t}\int\limits_{\Omega} u \, \mathrm{d}\Omega + \int\limits_{S} \unitVector{n}\cdot(u\Vector{V}) \, \mathrm{d}S = \dot{\phi}
+\end{equation}
+\[
+ \begin{split}
+ \mathcal{L} \{f\}(s) &= \int _{0^{-}}^{\infty} f(t) e^{-st} \, \mathrm{d}t, \
+ \mathscr{L} \{f\}(s) = \int _{0^{-}}^{\infty} f(t) e^{-st} \, \mathrm{d}t\\
+ \mathcal{F} {\bigl (} f(x+x_{0}) {\bigr )} &= \mathcal{F} {\bigl (} f(x) {\bigr )} e^{2\pi i\xi x_{0}}, \
+ \mathscr{F} {\bigl (} f(x+x_{0}) {\bigr )} = \mathscr{F} {\bigl (} f(x) {\bigr )} e^{2\pi i\xi x_{0}}
+ \end{split}
+\]
+
+数学公式常用命令请见 \href{https://en.wikibooks.org/wiki/LaTeX/Mathematics}{WiKibook Mathematics}。artracom.sty中对一些常用数据类型如矢量矩阵等进行了封装,这样的好处是如有一天需要修改矢量的显示形式,只需单独修改artracom.sty中的矢量定义即可实现全文档的修改。
+
+\subsection{数学环境}
+
+\begin{axiom}
+ 这是一个公理。
+\end{axiom}
+\begin{theorem}
+ 这是一个定理。
+\end{theorem}
+\begin{lemma}
+ 这是一个引理。
+\end{lemma}
+\begin{corollary}
+ 这是一个推论。
+\end{corollary}
+\begin{assertion}
+ 这是一个断言。
+\end{assertion}
+\begin{proposition}
+ 这是一个命题。
+\end{proposition}
+\begin{proof}
+ 这是一个证明。
+\end{proof}
+\begin{definition}
+ 这是一个定义。
+\end{definition}
+\begin{example}
+ 这是一个例子。
+\end{example}
+\begin{remark}
+ 这是一个注。
+\end{remark}
+
+\subsection{表格}
+
+请见表~\ref{tab:sample}。
+\begin{table}[!htbp]
+ \bicaption{这是一个样表。}{This is a sample table.}
+ \label{tab:sample}
+ \centering
+ \footnotesize% fontsize
+ \setlength{\tabcolsep}{4pt}% column separation
+ \renewcommand{\arraystretch}{1.2}%row space
+ \begin{tabular}{lcccccccc}
+ \hline
+ 行号 & \multicolumn{8}{c}{跨多列的标题}\\
+ %\cline{2-9}% partial hline from column i to column j
+ \hline
+ Row 1 & $1$ & $2$ & $3$ & $4$ & $5$ & $6$ & $7$ & $8$\\
+ Row 2 & $1$ & $2$ & $3$ & $4$ & $5$ & $6$ & $7$ & $8$\\
+ Row 3 & $1$ & $2$ & $3$ & $4$ & $5$ & $6$ & $7$ & $8$\\
+ Row 4 & $1$ & $2$ & $3$ & $4$ & $5$ & $6$ & $7$ & $8$\\
+ \hline
+ \end{tabular}
+\end{table}
+
+制图制表的更多范例,请见 \href{https://github.com/mohuangrui/ucasthesis/wiki}{ucasthesis 知识小站} 和 \href{https://en.wikibooks.org/wiki/LaTeX/Tables}{WiKibook Tables}。
+
+\subsection{图片插入}
+
+论文中图片的插入通常分为单图和多图,下面分别加以介绍:
+
+单图插入:假设插入名为\verb|tc_q_criteria|(后缀可以为.jpg、.png、.pdf,下同)的图片,其效果如图\ref{fig:tc_q_criteria}。
+\begin{figure}[!htbp]
+ \centering
+ \includegraphics[width=0.40\textwidth]{tc_q_criteria}
+ \bicaption{Q判据等值面图,同时测试一下一个很长的标题,比如这真的是一个很长很长很长很长很长很长很长很长的标题。}{Isocontour of Q criteria, at the same time, this is to test a long title, for instance, this is a really very long very long very long very long very long title.}
+ \label{fig:tc_q_criteria}
+\end{figure}
+
+如果插图的空白区域过大,以图片\verb|shock_cyn|为例,自动裁剪如图\ref{fig:shock_cyn}。
+\begin{figure}[!htbp]
+ \centering
+ %trim option's parameter order: left bottom right top
+ \includegraphics[trim = 30mm 0mm 30mm 0mm, clip, width=0.40\textwidth]{shock_cyn}
+ \bicaption{激波圆柱作用。}{Shock-cylinder interaction.}
+ \label{fig:shock_cyn}
+\end{figure}
+
+多图的插入如图\ref{fig:oaspl},多图不应在子图中给文本子标题,只要给序号,并在主标题中进行引用说明。
+\begin{figure}[!htbp]
+ \centering
+ \begin{subfigure}[b]{0.35\textwidth}
+ \includegraphics[width=\textwidth]{oaspl_a}
+ \caption{}
+ \label{fig:oaspl_a}
+ \end{subfigure}%
+ ~% add desired spacing
+ \begin{subfigure}[b]{0.35\textwidth}
+ \includegraphics[width=\textwidth]{oaspl_b}
+ \caption{}
+ \label{fig:oaspl_b}
+ \end{subfigure}
+ \\% line break
+ \begin{subfigure}[b]{0.35\textwidth}
+ \includegraphics[width=\textwidth]{oaspl_c}
+ \caption{}
+ \label{fig:oaspl_c}
+ \end{subfigure}%
+ ~% add desired spacing
+ \begin{subfigure}[b]{0.35\textwidth}
+ \includegraphics[width=\textwidth]{oaspl_d}
+ \caption{}
+ \label{fig:oaspl_d}
+ \end{subfigure}
+ \bicaption{总声压级。(a) 这是子图说明信息,(b) 这是子图说明信息,(c) 这是子图说明信息,(d) 这是子图说明信息。}{OASPL.(a) This is the explanation of subfig, (b) This is the explanation of subfig, (c) This is the explanation of subfig, (d) This is the explanation of subfig.}
+ \label{fig:oaspl}
+\end{figure}
+
+\subsection{算法}
+
+如见算法~\ref{alg:euclid},详细使用方法请参见文档 \href{https://ctan.org/pkg/algorithmicx?lang=en}{algorithmicx}。
+
+\begin{algorithm}[!htbp]
+ \small
+ \caption{Euclid's algorithm}\label{alg:euclid}
+ \begin{algorithmic}[1]
+ \Procedure{Euclid}{$a,b$}\Comment{The g.c.d. of a and b}
+ \State $r\gets a\bmod b$
+ \While{$r\not=0$}\Comment{We have the answer if r is 0}
+ \State $a\gets b$
+ \State $b\gets r$
+ \State $r\gets a\bmod b$
+ \EndWhile\label{euclidendwhile}
+ \State \textbf{return} $b$\Comment{The gcd is b}
+ \EndProcedure
+ \end{algorithmic}
+\end{algorithm}
+
+\subsection{参考文献引用}
+
+参考文献引用过程以实例进行介绍,假设需要引用名为"Document Preparation System"的文献,步骤如下:
+
+1)使用Google Scholar搜索Document Preparation System,在目标条目下点击Cite,展开后选择Import into BibTeX打开此文章的BibTeX索引信息,将它们copy添加到ref.bib文件中(此文件位于Biblio文件夹下)。
+
+2)索引第一行 \verb|@article{lamport1986document,|中 \verb|lamport1986document| 即为此文献的label (\textbf{中文文献也必须使用英文label},一般遵照:姓氏拼音+年份+标题第一字拼音的格式),想要在论文中索引此文献,有两种索引类型:
+
+文本类型:\verb|\citet{lamport1986document}|。正如此处所示 \citet{lamport1986document};
+
+括号类型:\verb|\citep{lamport1986document}|。正如此处所示 \citep{lamport1986document}。
+
+\textbf{多文献索引用英文逗号隔开}:
+
+\verb|\citep{lamport1986document, chu2004tushu, chen2005zhulu}|。正如此处所示 \citep{lamport1986document, chu2004tushu, chen2005zhulu}
+
+更多例子如:
+
+\citet{walls2013drought} 根据 \citet{betts2005aging} 的研究,首次提出...。其中关于... \citep{walls2013drought, betts2005aging},是当前中国...得到迅速发展的研究领域 \citep{chen1980zhongguo, bravo1990comparative}。引用同一著者在同一年份出版的多篇文献时,在出版年份之后用
+英文小写字母区别,如:\citep{yuan2012lana, yuan2012lanb, yuan2012lanc}。同一处引用多篇文献时,按出版年份由近及远依次标注,中间用
+分号分开。例如 \citep{chen1980zhongguo, stamerjohanns2009mathml, hls2012jinji, niu2013zonghe}。
+
+使用著者-出版年制(authoryear)式参考文献样式时,中文文献必须在BibTeX索引信息的 \textbf{key} 域(请参考ref.bib文件)填写作者姓名的拼音,才能使得文献列表按照拼音排序。参考文献表中的条目(不排序号),先按语种分类排列,语种顺 序是:中文、日文、英文、俄文、其他文种。然后,中文按汉语拼音字母顺序排列,日文按第一著者的姓氏笔画排序,西文和 俄文按第一著者姓氏首字母顺序排列。如中 \citep{niu2013zonghe}、日 \citep{Bohan1928}、英 \citep{stamerjohanns2009mathml}、俄 \citep{Dubrovin1906}。
+
+如此,即完成了文献的索引,请查看下本文档的参考文献一章,看看是不是就是这么简单呢?是的,就是这么简单!
+
+不同文献样式和引用样式,如著者-出版年制(authoryear)、顺序编码制(numbers)、上标顺序编码制(super)可在Thesis.tex中对artratex.sty调用实现,如:
+\begin{itemize}
+ \footnotesize
+ \item \verb+\usepackage[numbers]{artratex}+ $\%$ 文本: Jones [1]; 括号: [1]
+ \item \verb+\usepackage[super]{artratex}+ $\%$ 文本: Jones 上标[1]; 括号: 上标[1]
+ \item \verb+\usepackage[authoryear]{artratex}+ $\%$ 文本: Jones (1995); 括号: (Jones, 1995)
+ \item \verb+\usepackage[alpha]{artratex}+ $\%$ 文本: 不可用; 括号: [Jon95]
+\end{itemize}
+
+当前文档的默认参考文献样式为\textbf{authoryear}。若在上标(\textbf{super})模式下,希望在特定位置将上标改为嵌入式标,可使用
+
+文本类型:\verb|\citetns{lamport1986document,chen2005zhulu}|。
+
+正如此处所示 \citetns{lamport1986document,chen2005zhulu}
+
+括号类型:\verb|\citepns{lamport1986document,chen2005zhulu}|。
+
+正如此处所示 \citepns{lamport1986document,chen2005zhulu}
+
+参考文献索引更为详细的信息,请见 \href{https://en.wikibooks.org/wiki/LaTeX/Bibliography_Management}{WiKibook Bibliography}。
+
+
+\section{常见使用问题}\label{sec:qa}
+
+\begin{enumerate}
+ \item 模板每次发布前,都已在Windows,Linux,MacOS系统上测试通过。下载模板后,若编译出现错误,则请见 \href{https://github.com/mohuangrui/ucasthesis/wiki}{ucasthesis知识小站} 的 \href{https://github.com/mohuangrui/ucasthesis/wiki/%E7%BC%96%E8%AF%91%E6%8C%87%E5%8D%97}{编译指南}。
+
+ \item 模板文档的编码为UTF-8编码。所有文件都必须采用UTF-8编码,否则编译后生成的文档将出现乱码文本。若出现文本编辑器无法打开文档或打开文档乱码的问题,请检查编辑器对UTF-8编码的支持。如果使用WinEdt作为文本编辑器(\textbf{不推荐使用}),应在其Options -> Preferences -> wrapping选项卡下将两种Wrapping Modes中的内容:
+
+ TeX;HTML;ANSI;ASCII|DTX...
+
+ 修改为:TeX;\textbf{UTF-8|ACP;}HTML;ANSI;ASCII|DTX...
+
+ 同时,取消Options -> Preferences -> Unicode中的Enable ANSI Format。
+
+ \item 推荐选择xelatex或lualatex编译引擎编译中文文档。编译脚本的默认设定为xelatex编译引擎。你也可以选择不使用脚本编译,如直接使用 \LaTeX{}文本编辑器编译。注:\LaTeX{}文本编辑器编译的默认设定为pdflatex编译引擎,若选择xelatex或lualatex编译引擎,请进入下拉菜单选择。为正确生成引用链接,需要进行全编译。
+
+ \item Texmaker使用简介
+ \begin{enumerate}
+ \footnotesize
+ \item 使用 Texmaker “打开 (Open)” Thesis.tex。
+ \item 菜单 “选项 (Options)” -> “设置当前文档为主文档 (Define as Master Document)”
+ \item 菜单 “自定义 (User)” -> “自定义命令 (User Commands)” -> “编辑自定义命令 (Edit User Commands)” -> 左侧选择 “command 1”,右侧 “菜单项 (Menu Item)” 填入 Auto Build -> 点击下方“向导 (Wizard)” -> “添加 (Add)”: xelatex + bibtex + xelatex + xelatex + pdf viewer -> 点击“完成 (OK)”
+ \item 使用 Auto Build 编译带有未生成引用链接的源文件,可以仅使用 xelatex 编译带有已经正确生成引用链接的源文件。
+ \item 编译完成,“查看(View)” PDF,在PDF中 “ctrl+click” 可链接到相对应的源文件。
+ \end{enumerate}
+
+  \item 模板的设计尽可能地考虑了适应性。致谢等所有条目都是通过最为通用的
+
+ \verb+\chapter{item name}+ and \verb+\section*{item name}+
+
+ 来显式实现的 (请观察Backmatter.tex),从而可以随意添加,放置,和修改,如同一般章节。对于图表目录名称则可在ucasthesis.cfg中进行修改。
+
+ \item 设置文档样式: 在artratex.sty中搜索关键字定位相应命令,然后修改
+ \begin{enumerate}
+ \item 正文行距:启用和设置 \verb|\linespread{1.5}|,默认1.5倍行距。
+ \item 参考文献行距:修改 \verb|\setlength{\bibsep}{0.0ex}|
+ \item 目录显示级数:修改 \verb|\setcounter{tocdepth}{2}|
+ \item 文档超链接的颜色及其显示:修改 \verb|\hypersetup|
+ \end{enumerate}
+
+ \item 文档内字体切换方法:
+ \begin{itemize}
+ \item 宋体:国科大论文模板ucasthesis 或 \textrm{国科大论文模板ucasthesis}
+ \item 粗宋体:{\bfseries 国科大论文模板ucasthesis} 或 \textbf{国科大论文模板ucasthesis}
+ \item 黑体:{\sffamily 国科大论文模板ucasthesis} 或 \textsf{国科大论文模板ucasthesis}
+ \item 粗黑体:{\bfseries\sffamily 国科大论文模板ucasthesis} 或 \textsf{\bfseries 国科大论文模板ucasthesis}
+ \item 仿宋:{\ttfamily 国科大论文模板ucasthesis} 或 \texttt{国科大论文模板ucasthesis}
+ \item 粗仿宋:{\bfseries\ttfamily 国科大论文模板ucasthesis} 或 \texttt{\bfseries 国科大论文模板ucasthesis}
+ \item 楷体:{\itshape 国科大论文模板ucasthesis} 或 \textit{国科大论文模板ucasthesis}
+ \item 粗楷体:{\bfseries\itshape 国科大论文模板ucasthesis} 或 \textit{\bfseries 国科大论文模板ucasthesis}
+ \end{itemize}
+
+ \item 封面下划线上的文本不居中下划线,这是因为下划线前面还有字头,导致文本只能在页面居中和在下划线上居中二选一。当前封面采取页面居中。如需要调整文本在下划线上的位置,可用 \verb|\hspace{+/- n.0em}| 命令来插入或删除 n 个空格,进行手动调整,比如
+
+ \verb|\advisor{\hspace{+3.0em} xxx~研究员~xxx单位}|
+
+ 有时下划线看上去粗细不一致,这是显示的问题,打印正常。
+\end{enumerate}
+
+
diff --git a/Tex/Frontmatter.tex b/Tex/Frontmatter.tex
new file mode 100644
index 0000000..1e6376b
--- /dev/null
+++ b/Tex/Frontmatter.tex
@@ -0,0 +1,77 @@
+%---------------------------------------------------------------------------%
+%->> 封面信息及生成
+%---------------------------------------------------------------------------%
+%-
+%-> 中文封面信息
+%-
+\confidential{}% 密级:只有涉密论文才填写
+\schoollogo{scale=0.095}{ucas_logo}% 校徽
+\title{面向网络流量的重复音视频文件实时检测系统研究}% 论文中文题目
+\author{陈冠林}% 论文作者
+\advisor{刘庆云~正高级工程师}% 指导教师:姓名 专业技术职务 工作单位
+\advisors{中国科学院信息工程研究所}% 指导老师附加信息 或 第二指导老师信息
+\degree{工程硕士}% 学位:学士、硕士、博士
+\major{计算机技术}% 二级学科专业名称
+\institute{中国科学院信息工程研究所}% 院系名称
+\date{2019~年~6~月}% 毕业日期:夏季为6月、冬季为12月
+%-
+%-> 英文封面信息
+%-
+\TITLE{Research on Real-time Detection System \\ of \\ Repeated Multimedia Files for Network Traffic}% 论文英文题目
+\AUTHOR{Chen Guanlin}% 论文作者
+\ADVISOR{Supervisor: Professor Liu Qingyun}% 指导教师
+\DEGREE{Master}% 学位:Bachelor, Master, Doctor。封面格式将根据英文学位名称自动切换,请确保拼写准确无误
+\DEGREETYPE{Engineering}% 学位类别:Philosophy, Natural Science, Engineering, Economics, Agriculture 等
+\MAJOR{Computer Technology}% 二级学科专业名称
+\INSTITUTE{Institute of Information Engineering, Chinese Academy of Sciences}% 院系名称
+\DATE{June 2019}% 毕业日期:夏季为June、冬季为December
+%-
+%-> 生成封面
+%-
+\maketitle% 生成中文封面
+\MAKETITLE% 生成英文封面
+%-
+%-> 作者声明
+%-
+\makedeclaration% 生成声明页
+%-
+%-> 中文摘要
+%-
+\chapter*{摘\quad 要}\chaptermark{摘\quad 要}% 摘要标题
+\setcounter{page}{1}% 开始页码
+\pagenumbering{Roman}% 页码符号
+
+随着互联网的发展,网络已经渗入到我们生活的方方面面,越来越多的人使用网络观看和下载音视频节目,这也使得音视频流量已经成为网络流量中占比最高的部分。根据Sandvine发布的2018年全球互联网观察显示,视频流量已经占据全球流量的57.69\%,较上一年增长了22.43\%。
+
+据统计,YouTube流量中约有30\%的重复。大量重复的音视频文件通过网络进行传输,而代理服务器、入侵检测系统等网络中间件需要对这些流量进行处理,以达到版权保护、色情检测等目的。网络中间件对于重复流量的处理消耗了大量资源。
+
+为了解决网络中间件对重复音视频文件处理资源消耗的问题,本文构建了一套实时的重复音视频文件检测系统,该系统可以对网络流量中的重复音视频内容进行检测,用以指导网络中间件的处理,最终达到节约网络、存储和计算资源的目的。
+
+该系统使用以下技术:1)预测性文件标识生成方法。该技术采用信息熵和互信息作为评价标准,对生成文件标识的特征进行筛选,并在流量建立连接时,生成一个具有预测性的文件标识。 2)基于流式模糊哈希的重复音视频检测方法。该技术通过预测性文件标识检测文件的重复性,同时计算流式模糊哈希值生成文件编号,以验证标识的效果,并指导标识的特征选择。
+
+本文的创新点包括以下两点:
+
+1)提出了一种预测性文件标识生成方法,该方法在音视频传输初始阶段生成音视频文件标识。
+
+2)提出了一种基于流式模糊哈希的重复音视频检测方法,该方法可以检测完整的音视频文件的重复性,并指导预测性文件标识的生成。
+
+\keywords{网络流,重复文件,识别方法,预测性}% 中文关键词
+%-
+%-> 英文摘要
+%-
+\chapter*{Abstract}\chaptermark{Abstract}% 摘要标题
+
+With the development of the Internet, the network has penetrated into every aspect of our lives. More and more people use the network to watch and download multimedia programs, which makes multimedia traffic account for the highest proportion of network traffic. According to the 2018 internet phenomena report released by Sandvine, video traffic has accounted for 57.69\% of global traffic and increased by 22.43\% compared with last year.
+
+According to the statistics, around 30\% of the YouTube traffic is identified as redundant. There are a large number of repeated files in these multimedia traffic, and network middleboxes like proxy servers and intrusion detection systems consume a lot of resources for processing these repeated multimedia traffic. In this paper, we construct a real-time repeated multimedia file detection system, which can detect the repeated multimedia files in network traffic and guide the processing of network middleboxes.
+
+This system uses the following two technologies: 1) Predictive file identification generation method. This technology uses information entropy and mutual information as evaluation criteria to filter the features of the generated file identifiers and generates a predictive file identification when the traffic establishes a connection. 2) A method of detecting repeated multimedia based on streaming fuzzy hashing. If the file identification is queried in the database, the multimedia file is considered to be repeated, and the file number is generated by the stream fuzzy hash value to verify the effect of the file identification, and the result can be used to guide the feature selection of the file identification.
+
+The innovations of this paper include the following two points:
+
+1) A predictive file identification generation method is proposed, which generates multimedia file identifications in the initial stage of multimedia transmission.
+
+2) A method of detecting the repeated multimedia files based on streaming fuzzy hash is proposed. This method can detect the repeated multimedia files, and the detection results can be used to guide feature selection of the predictive file identification generation.
+
+\KEYWORDS{Network Traffic, Repeated File, Identification Method, Predictive}% 英文关键词
+%---------------------------------------------------------------------------%
diff --git a/Tex/Mainmatter.tex b/Tex/Mainmatter.tex
new file mode 100644
index 0000000..b232f74
--- /dev/null
+++ b/Tex/Mainmatter.tex
@@ -0,0 +1,10 @@
+%---------------------------------------------------------------------------%
+%->> Main content
+%---------------------------------------------------------------------------%
+\input{Tex/Chap_1}
+\input{Tex/Chap_2}
+\input{Tex/Chap_3}
+\input{Tex/Chap_4}
+\input{Tex/Chap_5}
+\input{Tex/Chap_6}
+%---------------------------------------------------------------------------%
diff --git a/Tex/Prematter.tex b/Tex/Prematter.tex
new file mode 100644
index 0000000..17b9a49
--- /dev/null
+++ b/Tex/Prematter.tex
@@ -0,0 +1,32 @@
+\chapter*{符号列表}
+\chaptermark{符号列表}
+
+\section*{字符}
+\nomenclatureitem[\textbf{Unit}]{\textbf{Symbol}}{\textbf{Description}}
+\nomenclatureitem[$\Unit{m^{2} \cdot s^{-2} \cdot K^{-1}}$]{$R$}{the gas constant}
+\nomenclatureitem[$\Unit{m^{2} \cdot s^{-2} \cdot K^{-1}}$]{$C_v$}{specific heat capacity at constant volume}
+\nomenclatureitem[$\Unit{m^{2} \cdot s^{-2} \cdot K^{-1}}$]{$C_p$}{specific heat capacity at constant pressure}
+\nomenclatureitem[$\Unit{m^{2} \cdot s^{-2}}$]{$E$}{specific total energy}
+\nomenclatureitem[$\Unit{m^{2} \cdot s^{-2}}$]{$e$}{specific internal energy}
+\nomenclatureitem[$\Unit{m^{2} \cdot s^{-2}}$]{$h_T$}{specific total enthalpy}
+\nomenclatureitem[$\Unit{m^{2} \cdot s^{-2}}$]{$h$}{specific enthalpy}
+\nomenclatureitem[$\Unit{kg \cdot m \cdot s^{-3} \cdot K^{-1}}$]{$k$}{thermal conductivity}
+\nomenclatureitem[$\Unit{kg \cdot m^{-1} \cdot s^{-2}}$]{$S_{ij}$}{deviatoric stress tensor}
+\nomenclatureitem[$\Unit{kg \cdot m^{-1} \cdot s^{-2}}$]{$\tau_{ij}$}{viscous stress tensor}
+\nomenclatureitem[$\Unit{1}$]{$\delta_{ij}$}{Kronecker tensor}
+\nomenclatureitem[$\Unit{1}$]{$I_{ij}$}{identity tensor}
+
+\section*{算子}
+\nomenclatureitem{\textbf{Symbol}}{\textbf{Description}}
+\nomenclatureitem{$\Delta$}{difference}
+\nomenclatureitem{$\nabla$}{gradient operator}
+\nomenclatureitem{$\delta^{\pm}$}{upwind-biased interpolation scheme}
+
+\section*{缩写}
+\nomenclatureitem{CFD}{Computational Fluid Dynamics}
+\nomenclatureitem{CFL}{Courant-Friedrichs-Lewy}
+\nomenclatureitem{EOS}{Equation of State}
+\nomenclatureitem{JWL}{Jones-Wilkins-Lee}
+\nomenclatureitem{WENO}{Weighted Essentially Non-oscillatory}
+\nomenclatureitem{ZND}{Zel'dovich-von Neumann-Doering}
+
diff --git a/Thesis.log b/Thesis.log
new file mode 100644
index 0000000..9f1f0d6
--- /dev/null
+++ b/Thesis.log
@@ -0,0 +1,1185 @@
+This is pdfTeX, Version 3.14159265-2.6-1.40.19 (TeX Live 2018/W32TeX) (preloaded format=pdflatex 2019.3.18) 15 MAY 2019 17:25
+entering extended mode
+ restricted \write18 enabled.
+ %&-line parsing enabled.
+**Thesis.tex
+(./Thesis.tex
+LaTeX2e <2018-04-01> patch level 2
+Babel <3.18> and hyphenation patterns for 84 language(s) loaded.
+(./Style/ucasthesis.cls
+
+LaTeX Warning: You have requested document class `Style/ucasthesis',
+ but the document class provides `ucasthesis'.
+
+Document Class: ucasthesis 2014/10/01 v1.0 LaTeX document class
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/ctexbook.cls
+(c:/texlive/2018/texmf-dist/tex/latex/l3kernel/expl3.sty
+Package: expl3 2018/03/05 L3 programming layer (loader)
+
+(c:/texlive/2018/texmf-dist/tex/latex/l3kernel/expl3-code.tex
+Package: expl3 2018/03/05 L3 programming layer (code)
+\c_max_int=\count80
+\l_tmpa_int=\count81
+\l_tmpb_int=\count82
+\g_tmpa_int=\count83
+\g_tmpb_int=\count84
+\g__intarray_font_int=\count85
+\g__prg_map_int=\count86
+\c_log_iow=\count87
+\l_iow_line_count_int=\count88
+\l__iow_line_target_int=\count89
+\l__iow_one_indent_int=\count90
+\l__iow_indent_int=\count91
+\c_zero_dim=\dimen102
+\c_max_dim=\dimen103
+\l_tmpa_dim=\dimen104
+\l_tmpb_dim=\dimen105
+\g_tmpa_dim=\dimen106
+\g_tmpb_dim=\dimen107
+\c_zero_skip=\skip41
+\c_max_skip=\skip42
+\l_tmpa_skip=\skip43
+\l_tmpb_skip=\skip44
+\g_tmpa_skip=\skip45
+\g_tmpb_skip=\skip46
+\c_zero_muskip=\muskip10
+\c_max_muskip=\muskip11
+\l_tmpa_muskip=\muskip12
+\l_tmpb_muskip=\muskip13
+\g_tmpa_muskip=\muskip14
+\g_tmpb_muskip=\muskip15
+\l_keys_choice_int=\count92
+\c__fp_leading_shift_int=\count93
+\c__fp_middle_shift_int=\count94
+\c__fp_trailing_shift_int=\count95
+\c__fp_big_leading_shift_int=\count96
+\c__fp_big_middle_shift_int=\count97
+\c__fp_big_trailing_shift_int=\count98
+\c__fp_Bigg_leading_shift_int=\count99
+\c__fp_Bigg_middle_shift_int=\count100
+\c__fp_Bigg_trailing_shift_int=\count101
+\c__fp_rand_size_int=\count102
+\c__fp_rand_four_int=\count103
+\c__fp_rand_eight_int=\count104
+\l__sort_length_int=\count105
+\l__sort_min_int=\count106
+\l__sort_top_int=\count107
+\l__sort_max_int=\count108
+\l__sort_true_max_int=\count109
+\l__sort_block_int=\count110
+\l__sort_begin_int=\count111
+\l__sort_end_int=\count112
+\l__sort_A_int=\count113
+\l__sort_B_int=\count114
+\l__sort_C_int=\count115
+\l__tl_build_start_index_int=\count116
+\l__tl_build_index_int=\count117
+\l__tl_analysis_normal_int=\count118
+\l__tl_analysis_index_int=\count119
+\l__tl_analysis_nesting_int=\count120
+\l__tl_analysis_type_int=\count121
+\l__regex_internal_a_int=\count122
+\l__regex_internal_b_int=\count123
+\l__regex_internal_c_int=\count124
+\l__regex_balance_int=\count125
+\l__regex_group_level_int=\count126
+\l__regex_mode_int=\count127
+\c__regex_cs_in_class_mode_int=\count128
+\c__regex_cs_mode_int=\count129
+\l__regex_catcodes_int=\count130
+\l__regex_default_catcodes_int=\count131
+\c__regex_catcode_D_int=\count132
+\c__regex_catcode_S_int=\count133
+\c__regex_catcode_L_int=\count134
+\c__regex_catcode_O_int=\count135
+\c__regex_catcode_A_int=\count136
+\c__regex_all_catcodes_int=\count137
+\l__regex_show_lines_int=\count138
+\l__regex_min_state_int=\count139
+\l__regex_max_state_int=\count140
+\l__regex_left_state_int=\count141
+\l__regex_right_state_int=\count142
+\l__regex_capturing_group_int=\count143
+\l__regex_min_pos_int=\count144
+\l__regex_max_pos_int=\count145
+\l__regex_curr_pos_int=\count146
+\l__regex_start_pos_int=\count147
+\l__regex_success_pos_int=\count148
+\l__regex_curr_char_int=\count149
+\l__regex_curr_catcode_int=\count150
+\l__regex_last_char_int=\count151
+\l__regex_case_changed_char_int=\count152
+\l__regex_curr_state_int=\count153
+\l__regex_step_int=\count154
+\l__regex_min_active_int=\count155
+\l__regex_max_active_int=\count156
+\l__regex_replacement_csnames_int=\count157
+\l__regex_match_count_int=\count158
+\l__regex_min_submatch_int=\count159
+\l__regex_submatch_int=\count160
+\l__regex_zeroth_submatch_int=\count161
+\g__regex_trace_regex_int=\count162
+\c_empty_box=\box26
+\l_tmpa_box=\box27
+\l_tmpb_box=\box28
+\g_tmpa_box=\box29
+\g_tmpb_box=\box30
+\l__box_top_dim=\dimen108
+\l__box_bottom_dim=\dimen109
+\l__box_left_dim=\dimen110
+\l__box_right_dim=\dimen111
+\l__box_top_new_dim=\dimen112
+\l__box_bottom_new_dim=\dimen113
+\l__box_left_new_dim=\dimen114
+\l__box_right_new_dim=\dimen115
+\l__box_internal_box=\box31
+\l__coffin_internal_box=\box32
+\l__coffin_internal_dim=\dimen116
+\l__coffin_offset_x_dim=\dimen117
+\l__coffin_offset_y_dim=\dimen118
+\l__coffin_x_dim=\dimen119
+\l__coffin_y_dim=\dimen120
+\l__coffin_x_prime_dim=\dimen121
+\l__coffin_y_prime_dim=\dimen122
+\c_empty_coffin=\box33
+\l__coffin_aligned_coffin=\box34
+\l__coffin_aligned_internal_coffin=\box35
+\l_tmpa_coffin=\box36
+\l_tmpb_coffin=\box37
+\l__coffin_display_coffin=\box38
+\l__coffin_display_coord_coffin=\box39
+\l__coffin_display_pole_coffin=\box40
+\l__coffin_display_offset_dim=\dimen123
+\l__coffin_display_x_dim=\dimen124
+\l__coffin_display_y_dim=\dimen125
+\l__coffin_bounding_shift_dim=\dimen126
+\l__coffin_left_corner_dim=\dimen127
+\l__coffin_right_corner_dim=\dimen128
+\l__coffin_bottom_corner_dim=\dimen129
+\l__coffin_top_corner_dim=\dimen130
+\l__coffin_scaled_total_height_dim=\dimen131
+\l__coffin_scaled_width_dim=\dimen132
+)
+(c:/texlive/2018/texmf-dist/tex/latex/l3kernel/l3pdfmode.def
+File: l3pdfmode.def 2017/03/18 v L3 Experimental driver: PDF mode
+\l__driver_color_stack_int=\count163
+))
+Document Class: ctexbook 2018/01/28 v2.4.12 Chinese adapter for class book (CTE
+X)
+(c:/texlive/2018/texmf-dist/tex/latex/l3packages/xparse/xparse.sty
+Package: xparse 2018/02/21 L3 Experimental document command parser
+\l__xparse_current_arg_int=\count164
+\g__xparse_grabber_int=\count165
+\l__xparse_m_args_int=\count166
+\l__xparse_mandatory_args_int=\count167
+\l__xparse_v_nesting_int=\count168
+)
+(c:/texlive/2018/texmf-dist/tex/latex/l3packages/l3keys2e/l3keys2e.sty
+Package: l3keys2e 2018/02/21 LaTeX2e option processing using LaTeX3 keys
+)
+\g__file_internal_ior=\read1
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/ctexhook.sty
+Package: ctexhook 2018/01/28 v2.4.12 Document and package hooks (CTEX)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/ctexpatch.sty
+Package: ctexpatch 2018/01/28 v2.4.12 Patching commands (CTEX)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/base/fix-cm.sty
+Package: fix-cm 2015/01/14 v1.1t fixes to LaTeX
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/ts1enc.def
+File: ts1enc.def 2001/06/05 v3.0e (jk/car/fm) Standard LaTeX file
+Now handling font encoding TS1 ...
+... processing UTF-8 mapping file for font encoding TS1
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/ts1enc.dfu
+File: ts1enc.dfu 2018/04/05 v1.2c UTF-8 support for inputenc
+ defining Unicode char U+00A2 (decimal 162)
+ defining Unicode char U+00A3 (decimal 163)
+ defining Unicode char U+00A4 (decimal 164)
+ defining Unicode char U+00A5 (decimal 165)
+ defining Unicode char U+00A6 (decimal 166)
+ defining Unicode char U+00A7 (decimal 167)
+ defining Unicode char U+00A8 (decimal 168)
+ defining Unicode char U+00A9 (decimal 169)
+ defining Unicode char U+00AA (decimal 170)
+ defining Unicode char U+00AC (decimal 172)
+ defining Unicode char U+00AE (decimal 174)
+ defining Unicode char U+00AF (decimal 175)
+ defining Unicode char U+00B0 (decimal 176)
+ defining Unicode char U+00B1 (decimal 177)
+ defining Unicode char U+00B2 (decimal 178)
+ defining Unicode char U+00B3 (decimal 179)
+ defining Unicode char U+00B4 (decimal 180)
+ defining Unicode char U+00B5 (decimal 181)
+ defining Unicode char U+00B6 (decimal 182)
+ defining Unicode char U+00B7 (decimal 183)
+ defining Unicode char U+00B9 (decimal 185)
+ defining Unicode char U+00BA (decimal 186)
+ defining Unicode char U+00BC (decimal 188)
+ defining Unicode char U+00BD (decimal 189)
+ defining Unicode char U+00BE (decimal 190)
+ defining Unicode char U+00D7 (decimal 215)
+ defining Unicode char U+00F7 (decimal 247)
+ defining Unicode char U+0192 (decimal 402)
+ defining Unicode char U+02C7 (decimal 711)
+ defining Unicode char U+02D8 (decimal 728)
+ defining Unicode char U+02DD (decimal 733)
+ defining Unicode char U+0E3F (decimal 3647)
+ defining Unicode char U+2016 (decimal 8214)
+ defining Unicode char U+2020 (decimal 8224)
+ defining Unicode char U+2021 (decimal 8225)
+ defining Unicode char U+2022 (decimal 8226)
+ defining Unicode char U+2030 (decimal 8240)
+ defining Unicode char U+2031 (decimal 8241)
+ defining Unicode char U+203B (decimal 8251)
+ defining Unicode char U+203D (decimal 8253)
+ defining Unicode char U+2044 (decimal 8260)
+ defining Unicode char U+204E (decimal 8270)
+ defining Unicode char U+2052 (decimal 8274)
+ defining Unicode char U+20A1 (decimal 8353)
+ defining Unicode char U+20A4 (decimal 8356)
+ defining Unicode char U+20A6 (decimal 8358)
+ defining Unicode char U+20A9 (decimal 8361)
+ defining Unicode char U+20AB (decimal 8363)
+ defining Unicode char U+20AC (decimal 8364)
+ defining Unicode char U+20B1 (decimal 8369)
+ defining Unicode char U+2103 (decimal 8451)
+ defining Unicode char U+2116 (decimal 8470)
+ defining Unicode char U+2117 (decimal 8471)
+ defining Unicode char U+211E (decimal 8478)
+ defining Unicode char U+2120 (decimal 8480)
+ defining Unicode char U+2122 (decimal 8482)
+ defining Unicode char U+2126 (decimal 8486)
+ defining Unicode char U+2127 (decimal 8487)
+ defining Unicode char U+212E (decimal 8494)
+ defining Unicode char U+2190 (decimal 8592)
+ defining Unicode char U+2191 (decimal 8593)
+ defining Unicode char U+2192 (decimal 8594)
+ defining Unicode char U+2193 (decimal 8595)
+ defining Unicode char U+2329 (decimal 9001)
+ defining Unicode char U+232A (decimal 9002)
+ defining Unicode char U+2422 (decimal 9250)
+ defining Unicode char U+25E6 (decimal 9702)
+ defining Unicode char U+25EF (decimal 9711)
+ defining Unicode char U+266A (decimal 9834)
+ defining Unicode char U+FEFF (decimal 65279)
+)))
+(c:/texlive/2018/texmf-dist/tex/latex/ms/everysel.sty
+Package: everysel 2011/10/28 v1.2 EverySelectfont Package (MS)
+)
+\l__ctex_tmp_int=\count169
+\l__ctex_tmp_box=\box41
+\l__ctex_tmp_dim=\dimen133
+\g__ctex_font_size_flag=\count170
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/config/ctexopts.cfg
+File: ctexopts.cfg 2018/01/28 v2.4.12 Option configuration file (CTEX)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/base/book.cls
+Document Class: book 2014/09/29 v1.4h Standard LaTeX document class
+(c:/texlive/2018/texmf-dist/tex/latex/base/bk12.clo
+File: bk12.clo 2014/09/29 v1.4h Standard LaTeX file (size option)
+)
+\c@part=\count171
+\c@chapter=\count172
+\c@section=\count173
+\c@subsection=\count174
+\c@subsubsection=\count175
+\c@paragraph=\count176
+\c@subparagraph=\count177
+\c@figure=\count178
+\c@table=\count179
+\abovecaptionskip=\skip47
+\belowcaptionskip=\skip48
+\bibindent=\dimen134
+)
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/engine/ctex-engine-pdftex.def
+File: ctex-engine-pdftex.def 2018/01/28 v2.4.12 (pdf)LaTeX adapter (CTEX)
+
+(c:/texlive/2018/texmf-dist/tex/latex/cjk/texinput/CJKutf8.sty
+Package: CJKutf8 2015/04/18 4.8.4
+
+(c:/texlive/2018/texmf-dist/tex/generic/oberdiek/ifpdf.sty
+Package: ifpdf 2017/03/15 v3.2 Provides the ifpdf switch
+)
+(c:/texlive/2018/texmf-dist/tex/latex/base/inputenc.sty
+Package: inputenc 2018/04/06 v1.3b Input encoding file
+\inpenc@prehook=\toks14
+\inpenc@posthook=\toks15
+)
+(c:/texlive/2018/texmf-dist/tex/latex/cjk/texinput/CJK.sty
+Package: CJK 2015/04/18 4.8.4
+
+(c:/texlive/2018/texmf-dist/tex/latex/cjk/texinput/mule/MULEenc.sty
+Package: MULEenc 2015/04/18 4.8.4
+)
+(c:/texlive/2018/texmf-dist/tex/latex/cjk/texinput/CJK.enc
+File: CJK.enc 2015/04/18 4.8.4
+Now handling font encoding C00 ...
+... no UTF-8 mapping file for font encoding C00
+Now handling font encoding C05 ...
+... no UTF-8 mapping file for font encoding C05
+Now handling font encoding C09 ...
+... no UTF-8 mapping file for font encoding C09
+Now handling font encoding C10 ...
+... no UTF-8 mapping file for font encoding C10
+Now handling font encoding C20 ...
+... no UTF-8 mapping file for font encoding C20
+Now handling font encoding C19 ...
+... no UTF-8 mapping file for font encoding C19
+Now handling font encoding C40 ...
+... no UTF-8 mapping file for font encoding C40
+Now handling font encoding C42 ...
+... no UTF-8 mapping file for font encoding C42
+Now handling font encoding C43 ...
+... no UTF-8 mapping file for font encoding C43
+Now handling font encoding C50 ...
+... no UTF-8 mapping file for font encoding C50
+Now handling font encoding C52 ...
+... no UTF-8 mapping file for font encoding C52
+Now handling font encoding C49 ...
+... no UTF-8 mapping file for font encoding C49
+Now handling font encoding C60 ...
+... no UTF-8 mapping file for font encoding C60
+Now handling font encoding C61 ...
+... no UTF-8 mapping file for font encoding C61
+Now handling font encoding C63 ...
+... no UTF-8 mapping file for font encoding C63
+Now handling font encoding C64 ...
+... no UTF-8 mapping file for font encoding C64
+Now handling font encoding C65 ...
+... no UTF-8 mapping file for font encoding C65
+Now handling font encoding C70 ...
+... no UTF-8 mapping file for font encoding C70
+Now handling font encoding C31 ...
+... no UTF-8 mapping file for font encoding C31
+Now handling font encoding C32 ...
+... no UTF-8 mapping file for font encoding C32
+Now handling font encoding C33 ...
+... no UTF-8 mapping file for font encoding C33
+Now handling font encoding C34 ...
+... no UTF-8 mapping file for font encoding C34
+Now handling font encoding C35 ...
+... no UTF-8 mapping file for font encoding C35
+Now handling font encoding C36 ...
+... no UTF-8 mapping file for font encoding C36
+Now handling font encoding C37 ...
+... no UTF-8 mapping file for font encoding C37
+Now handling font encoding C80 ...
+... no UTF-8 mapping file for font encoding C80
+Now handling font encoding C81 ...
+... no UTF-8 mapping file for font encoding C81
+Now handling font encoding C01 ...
+... no UTF-8 mapping file for font encoding C01
+Now handling font encoding C11 ...
+... no UTF-8 mapping file for font encoding C11
+Now handling font encoding C21 ...
+... no UTF-8 mapping file for font encoding C21
+Now handling font encoding C41 ...
+... no UTF-8 mapping file for font encoding C41
+Now handling font encoding C62 ...
+... no UTF-8 mapping file for font encoding C62
+)
+LaTeX Info: Redefining \selectfont on input line 755.
+\CJK@indent=\box42
+)
+(c:/texlive/2018/texmf-dist/tex/latex/base/fontenc.sty
+Package: fontenc 2017/04/05 v2.0i Standard LaTeX package
+))
+(c:/texlive/2018/texmf-dist/tex/latex/cjkpunct/CJKpunct.sty
+Package: CJKpunct 2016/05/14 4.8.4
+\CJKpunct@cnta=\count180
+\CJKpunct@cntb=\count181
+\CJKpunct@cntc=\count182
+\CJKpunct@cntd=\count183
+\CJKpunct@cnte=\count184
+ defining Unicode char U+2018 (decimal 8216)
+ defining Unicode char U+2019 (decimal 8217)
+ defining Unicode char U+201C (decimal 8220)
+ defining Unicode char U+201D (decimal 8221)
+ defining Unicode char U+2014 (decimal 8212)
+ defining Unicode char U+2026 (decimal 8230)
+
+(c:/texlive/2018/texmf-dist/tex/latex/cjkpunct/CJKpunct.spa))
+(c:/texlive/2018/texmf-dist/tex/latex/cjk/texinput/CJKspace.sty
+Package: CJKspace 2015/04/18 3.8.0
+)
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/ctexspa.def
+File: ctexspa.def 2018/01/28 v2.4.12 Space info for CJKpunct (CTEX)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/cjk/texinput/CJKfntef.sty
+Package: CJKfntef 2015/04/18 4.8.4
+
+(c:/texlive/2018/texmf-dist/tex/latex/cjk/texinput/CJKulem.sty
+Package: CJKulem 2015/04/18 4.8.4
+
+(c:/texlive/2018/texmf-dist/tex/generic/ulem/ulem.sty
+\UL@box=\box43
+\UL@hyphenbox=\box44
+\UL@skip=\skip49
+\UL@hook=\toks16
+\UL@height=\dimen135
+\UL@pe=\count185
+\UL@pixel=\dimen136
+\ULC@box=\box45
+Package: ulem 2012/05/18
+\ULdepth=\dimen137
+)
+\UL@lastkern=\dimen138
+\CJK@skip=\skip50
+)
+\CJK@fntefSkip=\skip51
+\CJK@nest=\count186
+\CJK@fntefDimen=\dimen139
+\CJK@underdotBox=\box46
+\CJK@ULbox=\box47
+\CJK@underanyskip=\dimen140
+)
+\ccwd=\dimen141
+\l__ctex_ccglue_skip=\skip52
+)
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \ctexset with sig. '' on line 388.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXsetup with sig. '+o>{\TrimSpaces }m' on line 394.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXoptions with sig. '+o' on line 400.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXsetfont with sig. '' on line 418.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \ziju with sig. 'm' on line 490.
+.................................................
+\l__ctex_ziju_dim=\dimen142
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXindent with sig. '' on line 531.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXnoindent with sig. '' on line 537.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/zhnumber/zhnumber.sty
+Package: zhnumber 2018/01/28 v2.6 Typesetting numbers with Chinese glyphs
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumber with sig. '+o+m' on line 50.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumberwithoptions with sig. '+m+m' on line 57.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnum with sig. '+o+m' on line 111.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumwithoptions with sig. '+m+m' on line 118.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdig with sig. '+o+m' on line 295.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdigwithoptions with sig. '+m+m' on line 302.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdigits with sig. '+s+o+m' on line 316.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdigitswithoptions with sig. '+m+m+m' on line 323.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdate with sig. '+s+m' on line 382.
+.................................................
+\l__zhnum_scale_int=\count187
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumExtendScaleMap with sig. '>{\TrimSpaces }+o+m' on
+. line 504.
+.................................................
+\l__zhnum_byte_min_int=\count188
+\l__zhnum_byte_max_int=\count189
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumsetup with sig. '+m' on line 934.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/zhnumber/zhnumber-utf8.cfg
+File: zhnumber-utf8.cfg 2018/01/28 v2.6 Chinese numerals with UTF8 encoding
+))
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXnumber with sig. 'mm' on line 554.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXdigits with sig. 'mm' on line 556.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \ctex_assign_heading_name:nn with sig. 'm>{\SplitArgument
+. {\c_one }{,}}+m' on line 681.
+.................................................
+\l__ctex_heading_skip=\skip53
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \partmark with sig. 'm' on line 728.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \refstepcounter with sig. 'm' on line 1323.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/scheme/ctex-scheme-chinese-book.def
+File: ctex-scheme-chinese-book.def 2018/01/28 v2.4.12 Chinese scheme for book (
+CTEX)
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/config/ctex-name-utf8.cfg
+File: ctex-name-utf8.cfg 2018/01/28 v2.4.12 Caption with encoding UTF8 (CTEX)
+))
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zihao with sig. 'm' on line 1326.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/ctex-cs4size.clo
+File: ctex-cs4size.clo 2018/01/28 v2.4.12 cs4size option (CTEX)
+)
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTeX with sig. '' on line 1430.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/fontset/ctex-fontset-windows.def
+File: ctex-fontset-windows.def 2018/01/28 v2.4.12 Windows fonts definition (CTE
+X)
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/fontset/ctex-fontset-windowsnew.def
+File: ctex-fontset-windowsnew.def 2018/01/28 v2.4.12 Windows fonts definition f
+or Vista or later version (CTEX)
+
+(c:/texlive/2018/texmf-dist/tex/generic/ctex/zhwindowsfonts.tex
+File: zhwindowsfonts.tex 2018/01/28 v2.4.12 Windows font map loader for pdfTeX
+and DVIPDFMx (CTEX)
+{c:/texlive/2018/texmf-dist/fonts/map/pdftex/updmap/pdftex.map}{UGBK.sfd}{Unico
+de.sfd})
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \songti with sig. '' on line 111.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \heiti with sig. '' on line 112.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \fangsong with sig. '' on line 113.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \kaishu with sig. '' on line 114.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \lishu with sig. '' on line 115.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \youyuan with sig. '' on line 116.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \yahei with sig. '' on line 117.
+.................................................
+)))
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/config/ctex.cfg
+File: ctex.cfg 2018/01/28 v2.4.12 Configuration file (CTEX)
+)
+(c:/texlive/2018/texmf-dist/tex/generic/ifxetex/ifxetex.sty
+Package: ifxetex 2010/09/12 v0.6 Provides ifxetex conditional
+)
+(c:/texlive/2018/texmf-dist/tex/latex/etoolbox/etoolbox.sty
+Package: etoolbox 2018/02/11 v2.5e e-TeX tools for LaTeX (JAW)
+\etb@tempcnta=\count190
+))
+(./Style/ucasthesis.cfg
+File: ucasthesis.cfg 2014/10/01 v1.0 class configuration file
+) (./Style/artratex.sty
+
+LaTeX Warning: You have requested package `Style/artratex',
+ but the package provides `artratex'.
+
+Package: artratex 2014/10/01 v1.0 LaTeX macros package
+(c:/texlive/2018/texmf-dist/tex/generic/oberdiek/ifluatex.sty
+Package: ifluatex 2016/05/16 v1.4 Provides the ifluatex switch (HO)
+Package ifluatex Info: LuaTeX not detected.
+)
+
+Package artratex Warning: Unknown option 'myhdr' on input line 176.
+
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amsmath.sty
+Package: amsmath 2017/09/02 v2.17a AMS math features
+\@mathmargin=\skip54
+
+For additional information on amsmath, use the `?' option.
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amstext.sty
+Package: amstext 2000/06/29 v2.01 AMS text
+
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amsgen.sty
+File: amsgen.sty 1999/11/30 v2.0 generic functions
+\@emptytoks=\toks17
+\ex@=\dimen143
+))
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amsbsy.sty
+Package: amsbsy 1999/11/29 v1.2d Bold Symbols
+\pmbraise@=\dimen144
+)
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amsopn.sty
+Package: amsopn 2016/03/08 v2.02 operator names
+)
+\inf@bad=\count191
+LaTeX Info: Redefining \frac on input line 213.
+\uproot@=\count192
+\leftroot@=\count193
+LaTeX Info: Redefining \overline on input line 375.
+\classnum@=\count194
+\DOTSCASE@=\count195
+LaTeX Info: Redefining \ldots on input line 472.
+LaTeX Info: Redefining \dots on input line 475.
+LaTeX Info: Redefining \cdots on input line 596.
+\Mathstrutbox@=\box48
+\strutbox@=\box49
+\big@size=\dimen145
+LaTeX Font Info: Redeclaring font encoding OML on input line 712.
+LaTeX Font Info: Redeclaring font encoding OMS on input line 713.
+\macc@depth=\count196
+\c@MaxMatrixCols=\count197
+\dotsspace@=\muskip16
+\c@parentequation=\count198
+\dspbrk@lvl=\count199
+\tag@help=\toks18
+\row@=\count266
+\column@=\count267
+\maxfields@=\count268
+\andhelp@=\toks19
+\eqnshift@=\dimen146
+\alignsep@=\dimen147
+\tagshift@=\dimen148
+\tagwidth@=\dimen149
+\totwidth@=\dimen150
+\lineht@=\dimen151
+\@envbody=\toks20
+\multlinegap=\skip55
+\multlinetaggap=\skip56
+\mathdisplay@stack=\toks21
+LaTeX Info: Redefining \[ on input line 2817.
+LaTeX Info: Redefining \] on input line 2818.
+)
+(c:/texlive/2018/texmf-dist/tex/latex/amscls/amsthm.sty
+Package: amsthm 2017/10/31 v2.20.4
+\thm@style=\toks22
+\thm@bodyfont=\toks23
+\thm@headfont=\toks24
+\thm@notefont=\toks25
+\thm@headpunct=\toks26
+\thm@preskip=\skip57
+\thm@postskip=\skip58
+\thm@headsep=\skip59
+\dth@everypar=\toks27
+)
+(c:/texlive/2018/texmf-dist/tex/latex/amsfonts/amssymb.sty
+Package: amssymb 2013/01/14 v3.01 AMS font symbols
+
+(c:/texlive/2018/texmf-dist/tex/latex/amsfonts/amsfonts.sty
+Package: amsfonts 2013/01/14 v3.01 Basic AMSFonts support
+\symAMSa=\mathgroup4
+\symAMSb=\mathgroup5
+LaTeX Font Info: Overwriting math alphabet `\mathfrak' in version `bold'
+(Font) U/euf/m/n --> U/euf/b/n on input line 106.
+))
+\c@theorem=\count269
+\c@definition=\count270
+\c@example=\count271
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/fontenc.sty
+Package: fontenc 2017/04/05 v2.0i Standard LaTeX package
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/t1enc.def
+File: t1enc.def 2017/04/05 v2.0i Standard LaTeX file
+LaTeX Font Info: Redeclaring font encoding T1 on input line 48.
+))
+(c:/texlive/2018/texmf-dist/tex/latex/newtx/newtxtext.sty
+Package: newtxtext 2018/03/27 v1.531
+
+`newtxtext' v1.531, 2018/03/27 Text macros taking advantage of TeX-Gyre Termes
+fonts (msharpe) (c:/texlive/2018/texmf-dist/tex/latex/fontaxes/fontaxes.sty
+Package: fontaxes 2014/03/23 v1.0d Font selection axes
+LaTeX Info: Redefining \upshape on input line 29.
+LaTeX Info: Redefining \itshape on input line 31.
+LaTeX Info: Redefining \slshape on input line 33.
+LaTeX Info: Redefining \scshape on input line 37.
+)
+\ntx@fs=\dimen152
+
+(c:/texlive/2018/texmf-dist/tex/latex/xkeyval/xkeyval.sty
+Package: xkeyval 2014/12/03 v2.7a package option processing (HA)
+
+(c:/texlive/2018/texmf-dist/tex/generic/xkeyval/xkeyval.tex
+(c:/texlive/2018/texmf-dist/tex/generic/xkeyval/xkvutils.tex
+\XKV@toks=\toks28
+\XKV@tempa@toks=\toks29
+
+(c:/texlive/2018/texmf-dist/tex/generic/xkeyval/keyval.tex))
+\XKV@depth=\count272
+File: xkeyval.tex 2014/12/03 v2.7a key=value parser (HA)
+))
+(c:/texlive/2018/texmf-dist/tex/latex/base/fontenc.sty
+Package: fontenc 2017/04/05 v2.0i Standard LaTeX package
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/t1enc.def
+File: t1enc.def 2017/04/05 v2.0i Standard LaTeX file
+LaTeX Font Info: Redeclaring font encoding T1 on input line 48.
+))
+(c:/texlive/2018/texmf-dist/tex/latex/base/textcomp.sty
+Package: textcomp 2017/04/05 v2.0i Standard LaTeX package
+Package textcomp Info: Sub-encoding information:
+(textcomp) 5 = only ISO-Adobe without \textcurrency
+(textcomp) 4 = 5 + \texteuro
+(textcomp) 3 = 4 + \textohm
+(textcomp) 2 = 3 + \textestimated + \textcurrency
+(textcomp) 1 = TS1 - \textcircled - \t
+(textcomp) 0 = TS1 (full)
+(textcomp) Font families with sub-encoding setting implement
+(textcomp) only a restricted character set as indicated.
+(textcomp) Family '?' is the default used for unknown fonts.
+(textcomp) See the documentation for details.
+Package textcomp Info: Setting ? sub-encoding to TS1/1 on input line 79.
+Package textcomp Info: Changing ? sub-encoding to TS1/0 on input line 80.
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/ts1enc.def
+File: ts1enc.def 2001/06/05 v3.0e (jk/car/fm) Standard LaTeX file
+LaTeX Font Info: Redeclaring font encoding TS1 on input line 47.
+)
+LaTeX Info: Redefining \oldstylenums on input line 334.
+Package textcomp Info: Setting cmr sub-encoding to TS1/0 on input line 349.
+Package textcomp Info: Setting cmss sub-encoding to TS1/0 on input line 350.
+Package textcomp Info: Setting cmtt sub-encoding to TS1/0 on input line 351.
+Package textcomp Info: Setting cmvtt sub-encoding to TS1/0 on input line 352.
+Package textcomp Info: Setting cmbr sub-encoding to TS1/0 on input line 353.
+Package textcomp Info: Setting cmtl sub-encoding to TS1/0 on input line 354.
+Package textcomp Info: Setting ccr sub-encoding to TS1/0 on input line 355.
+Package textcomp Info: Setting ptm sub-encoding to TS1/4 on input line 356.
+Package textcomp Info: Setting pcr sub-encoding to TS1/4 on input line 357.
+Package textcomp Info: Setting phv sub-encoding to TS1/4 on input line 358.
+Package textcomp Info: Setting ppl sub-encoding to TS1/3 on input line 359.
+Package textcomp Info: Setting pag sub-encoding to TS1/4 on input line 360.
+Package textcomp Info: Setting pbk sub-encoding to TS1/4 on input line 361.
+Package textcomp Info: Setting pnc sub-encoding to TS1/4 on input line 362.
+Package textcomp Info: Setting pzc sub-encoding to TS1/4 on input line 363.
+Package textcomp Info: Setting bch sub-encoding to TS1/4 on input line 364.
+Package textcomp Info: Setting put sub-encoding to TS1/5 on input line 365.
+Package textcomp Info: Setting uag sub-encoding to TS1/5 on input line 366.
+Package textcomp Info: Setting ugq sub-encoding to TS1/5 on input line 367.
+Package textcomp Info: Setting ul8 sub-encoding to TS1/4 on input line 368.
+Package textcomp Info: Setting ul9 sub-encoding to TS1/4 on input line 369.
+Package textcomp Info: Setting augie sub-encoding to TS1/5 on input line 370.
+Package textcomp Info: Setting dayrom sub-encoding to TS1/3 on input line 371.
+Package textcomp Info: Setting dayroms sub-encoding to TS1/3 on input line 372.
+
+Package textcomp Info: Setting pxr sub-encoding to TS1/0 on input line 373.
+Package textcomp Info: Setting pxss sub-encoding to TS1/0 on input line 374.
+Package textcomp Info: Setting pxtt sub-encoding to TS1/0 on input line 375.
+Package textcomp Info: Setting txr sub-encoding to TS1/0 on input line 376.
+Package textcomp Info: Setting txss sub-encoding to TS1/0 on input line 377.
+Package textcomp Info: Setting txtt sub-encoding to TS1/0 on input line 378.
+Package textcomp Info: Setting lmr sub-encoding to TS1/0 on input line 379.
+Package textcomp Info: Setting lmdh sub-encoding to TS1/0 on input line 380.
+Package textcomp Info: Setting lmss sub-encoding to TS1/0 on input line 381.
+Package textcomp Info: Setting lmssq sub-encoding to TS1/0 on input line 382.
+Package textcomp Info: Setting lmvtt sub-encoding to TS1/0 on input line 383.
+Package textcomp Info: Setting lmtt sub-encoding to TS1/0 on input line 384.
+Package textcomp Info: Setting qhv sub-encoding to TS1/0 on input line 385.
+Package textcomp Info: Setting qag sub-encoding to TS1/0 on input line 386.
+Package textcomp Info: Setting qbk sub-encoding to TS1/0 on input line 387.
+Package textcomp Info: Setting qcr sub-encoding to TS1/0 on input line 388.
+Package textcomp Info: Setting qcs sub-encoding to TS1/0 on input line 389.
+Package textcomp Info: Setting qpl sub-encoding to TS1/0 on input line 390.
+Package textcomp Info: Setting qtm sub-encoding to TS1/0 on input line 391.
+Package textcomp Info: Setting qzc sub-encoding to TS1/0 on input line 392.
+Package textcomp Info: Setting qhvc sub-encoding to TS1/0 on input line 393.
+Package textcomp Info: Setting futs sub-encoding to TS1/4 on input line 394.
+Package textcomp Info: Setting futx sub-encoding to TS1/4 on input line 395.
+Package textcomp Info: Setting futj sub-encoding to TS1/4 on input line 396.
+Package textcomp Info: Setting hlh sub-encoding to TS1/3 on input line 397.
+Package textcomp Info: Setting hls sub-encoding to TS1/3 on input line 398.
+Package textcomp Info: Setting hlst sub-encoding to TS1/3 on input line 399.
+Package textcomp Info: Setting hlct sub-encoding to TS1/5 on input line 400.
+Package textcomp Info: Setting hlx sub-encoding to TS1/5 on input line 401.
+Package textcomp Info: Setting hlce sub-encoding to TS1/5 on input line 402.
+Package textcomp Info: Setting hlcn sub-encoding to TS1/5 on input line 403.
+Package textcomp Info: Setting hlcw sub-encoding to TS1/5 on input line 404.
+Package textcomp Info: Setting hlcf sub-encoding to TS1/5 on input line 405.
+Package textcomp Info: Setting pplx sub-encoding to TS1/3 on input line 406.
+Package textcomp Info: Setting pplj sub-encoding to TS1/3 on input line 407.
+Package textcomp Info: Setting ptmx sub-encoding to TS1/4 on input line 408.
+Package textcomp Info: Setting ptmj sub-encoding to TS1/4 on input line 409.
+))
+(c:/texlive/2018/texmf-dist/tex/latex/newtx/newtxmath.sty
+Package: newtxmath 2017/12/14 v1.53
+
+`newtxmath' v1.53, 2017/12/14 Math macros based on txfonts (msharpe)
+(c:/texlive/2018/texmf-dist/tex/latex/oberdiek/centernot.sty
+Package: centernot 2016/05/16 v1.4 Centers the not symbol horizontally (HO)
+)
+(c:/texlive/2018/texmf-dist/tex/generic/kastrup/binhex.tex)
+\tx@Isdigit=\count273
+LaTeX Font Info: Redeclaring symbol font `operators' on input line 156.
+LaTeX Font Info: Overwriting symbol font `operators' in version `normal'
+(Font) OT1/cmr/m/n --> OT1/ntxtlf/m/n on input line 156.
+LaTeX Font Info: Overwriting symbol font `operators' in version `bold'
+(Font) OT1/cmr/bx/n --> OT1/ntxtlf/m/n on input line 156.
+LaTeX Font Info: Overwriting symbol font `operators' in version `bold'
+(Font) OT1/ntxtlf/m/n --> OT1/ntxtlf/b/n on input line 157.
+LaTeX Font Info: Redeclaring math alphabet \mathsf on input line 163.
+LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `normal'
+(Font) OT1/cmss/m/n --> T1/qhv/m/n on input line 163.
+LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `bold'
+(Font) OT1/cmss/bx/n --> T1/qhv/m/n on input line 163.
+LaTeX Font Info: Redeclaring math alphabet \mathit on input line 164.
+LaTeX Font Info: Overwriting math alphabet `\mathit' in version `normal'
+(Font) OT1/cmr/m/it --> OT1/ntxtlf/m/it on input line 164.
+LaTeX Font Info: Overwriting math alphabet `\mathit' in version `bold'
+(Font) OT1/cmr/bx/it --> OT1/ntxtlf/m/it on input line 164.
+LaTeX Font Info: Redeclaring math alphabet \mathtt on input line 165.
+LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `normal'
+(Font) OT1/cmtt/m/n --> T1/ntxtt/m/n on input line 165.
+LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `bold'
+(Font) OT1/cmtt/m/n --> T1/ntxtt/m/n on input line 165.
+LaTeX Font Info: Redeclaring math alphabet \mathbf on input line 167.
+LaTeX Font Info: Overwriting math alphabet `\mathbf' in version `normal'
+(Font) OT1/cmr/bx/n --> OT1/ntxtlf/b/n on input line 167.
+LaTeX Font Info: Overwriting math alphabet `\mathbf' in version `bold'
+(Font) OT1/cmr/bx/n --> OT1/ntxtlf/b/n on input line 167.
+LaTeX Font Info: Overwriting math alphabet `\mathit' in version `bold'
+(Font) OT1/ntxtlf/m/it --> OT1/ntxtlf/b/it on input line 168.
+LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `bold'
+(Font) T1/qhv/m/n --> T1/qhv/b/n on input line 169.
+LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `bold'
+(Font) T1/ntxtt/m/n --> T1/ntxtt/b/n on input line 170.
+LaTeX Font Info: Redeclaring symbol font `letters' on input line 221.
+LaTeX Font Info: Overwriting symbol font `letters' in version `normal'
+(Font) OML/cmm/m/it --> OML/ntxmi/m/it on input line 221.
+LaTeX Font Info: Overwriting symbol font `letters' in version `bold'
+(Font) OML/cmm/b/it --> OML/ntxmi/m/it on input line 221.
+LaTeX Font Info: Overwriting symbol font `letters' in version `bold'
+(Font) OML/ntxmi/m/it --> OML/ntxmi/b/it on input line 222.
+\symlettersA=\mathgroup6
+LaTeX Font Info: Overwriting symbol font `lettersA' in version `bold'
+(Font) U/ntxmia/m/it --> U/ntxmia/b/it on input line 238.
+LaTeX Font Info: Redeclaring math alphabet \mathfrak on input line 240.
+Now handling font encoding LMS ...
+... no UTF-8 mapping file for font encoding LMS
+LaTeX Font Info: Redeclaring symbol font `symbols' on input line 259.
+LaTeX Font Info: Encoding `OMS' has changed to `LMS' for symbol font
+(Font) `symbols' in the math version `normal' on input line 259.
+LaTeX Font Info: Overwriting symbol font `symbols' in version `normal'
+(Font) OMS/cmsy/m/n --> LMS/ntxsy/m/n on input line 259.
+LaTeX Font Info: Encoding `OMS' has changed to `LMS' for symbol font
+(Font) `symbols' in the math version `bold' on input line 259.
+LaTeX Font Info: Overwriting symbol font `symbols' in version `bold'
+(Font) OMS/cmsy/b/n --> LMS/ntxsy/m/n on input line 259.
+LaTeX Font Info: Overwriting symbol font `symbols' in version `bold'
+(Font) LMS/ntxsy/m/n --> LMS/ntxsy/b/n on input line 260.
+\symAMSm=\mathgroup7
+LaTeX Font Info: Overwriting symbol font `AMSm' in version `bold'
+(Font) U/ntxsym/m/n --> U/ntxsym/b/n on input line 283.
+\symsymbolsC=\mathgroup8
+LaTeX Font Info: Overwriting symbol font `symbolsC' in version `bold'
+(Font) U/ntxsyc/m/n --> U/ntxsyc/b/n on input line 304.
+Now handling font encoding LMX ...
+... no UTF-8 mapping file for font encoding LMX
+LaTeX Font Info: Redeclaring symbol font `largesymbols' on input line 317.
+LaTeX Font Info: Encoding `OMX' has changed to `LMX' for symbol font
+(Font) `largesymbols' in the math version `normal' on input line 3
+17.
+LaTeX Font Info: Overwriting symbol font `largesymbols' in version `normal'
+(Font) OMX/cmex/m/n --> LMX/ntxexx/m/n on input line 317.
+LaTeX Font Info: Encoding `OMX' has changed to `LMX' for symbol font
+(Font) `largesymbols' in the math version `bold' on input line 317
+.
+LaTeX Font Info: Overwriting symbol font `largesymbols' in version `bold'
+(Font) OMX/cmex/m/n --> LMX/ntxexx/m/n on input line 317.
+LaTeX Font Info: Overwriting symbol font `largesymbols' in version `bold'
+(Font) LMX/ntxexx/m/n --> LMX/ntxexx/b/n on input line 318.
+\symlargesymbolsTXA=\mathgroup9
+LaTeX Font Info: Overwriting symbol font `largesymbolsTXA' in version `bold'
+
+(Font) U/ntxexa/m/n --> U/ntxexa/b/n on input line 332.
+LaTeX Font Info: Redeclaring math delimiter \lgroup on input line 503.
+LaTeX Font Info: Redeclaring math delimiter \rgroup on input line 504.
+LaTeX Font Info: Redeclaring math delimiter \lmoustache on input line 505.
+LaTeX Font Info: Redeclaring math delimiter \rmoustache on input line 506.
+LaTeX Font Info: Redeclaring math delimiter \lfloor on input line 512.
+LaTeX Font Info: Redeclaring math delimiter \rfloor on input line 513.
+LaTeX Font Info: Redeclaring math delimiter \lceil on input line 514.
+LaTeX Font Info: Redeclaring math delimiter \rceil on input line 515.
+LaTeX Font Info: Redeclaring math delimiter \lbrace on input line 516.
+LaTeX Font Info: Redeclaring math delimiter \rbrace on input line 517.
+LaTeX Font Info: Redeclaring math delimiter \langle on input line 518.
+LaTeX Font Info: Redeclaring math delimiter \rangle on input line 520.
+LaTeX Font Info: Redeclaring math symbol \mid on input line 522.
+LaTeX Font Info: Redeclaring math delimiter \arrowvert on input line 524.
+LaTeX Font Info: Redeclaring math delimiter \vert on input line 525.
+LaTeX Font Info: Redeclaring math symbol \parallel on input line 526.
+LaTeX Font Info: Redeclaring math delimiter \Arrowvert on input line 527.
+LaTeX Font Info: Redeclaring math delimiter \Vert on input line 528.
+LaTeX Font Info: Redeclaring math delimiter \updownarrow on input line 530.
+LaTeX Font Info: Redeclaring math delimiter \Updownarrow on input line 531.
+LaTeX Font Info: Redeclaring math delimiter \lvert on input line 536.
+LaTeX Font Info: Redeclaring math delimiter \rvert on input line 537.
+LaTeX Font Info: Redeclaring math delimiter \lVert on input line 538.
+LaTeX Font Info: Redeclaring math delimiter \rVert on input line 539.
+LaTeX Info: Redefining \not on input line 2146.
+LaTeX Info: Redefining \textsquare on input line 2176.
+LaTeX Info: Redefining \openbox on input line 2178.
+)
+(c:/texlive/2018/texmf-dist/tex/latex/jknapltx/mathrsfs.sty
+Package: mathrsfs 1996/01/01 Math RSFS package v1.0 (jk)
+\symrsfs=\mathgroup10
+)
+(c:/texlive/2018/texmf-dist/tex/latex/mathalfa/mathalfa.sty
+Package: mathalfa 2017/02/23 - 1.09mathalfa (License LPPL) Michael Sharpe
+LaTeX Font Info: Redeclaring math alphabet \mathcal on input line 189.
+LaTeX Font Info: Overwriting math alphabet `\mathcal' in version `bold'
+(Font) U/cmsy/m/n --> U/cmsy/b/n on input line 190.
+)
+(c:/texlive/2018/texmf-dist/tex/latex/natbib/natbib.sty
+Package: natbib 2010/09/13 8.31b (PWD, AO)
+\bibhang=\skip60
+\bibsep=\skip61
+LaTeX Info: Redefining \cite on input line 694.
+\c@NAT@ctr=\count274
+)
+(c:/texlive/2018/texmf-dist/tex/latex/graphics/graphicx.sty
+Package: graphicx 2017/06/01 v1.1a Enhanced LaTeX Graphics (DPC,SPQR)
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics/graphics.sty
+Package: graphics 2017/06/25 v1.2c Standard LaTeX Graphics (DPC,SPQR)
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics/trig.sty
+Package: trig 2016/01/03 v1.10 sin cos tan (DPC)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/graphics-cfg/graphics.cfg
+File: graphics.cfg 2016/06/04 v1.11 sample graphics configuration
+)
+Package graphics Info: Driver file: pdftex.def on input line 99.
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics-def/pdftex.def
+File: pdftex.def 2018/01/08 v1.0l Graphics/color driver for pdftex
+))
+\Gin@req@height=\dimen153
+\Gin@req@width=\dimen154
+)
+(c:/texlive/2018/texmf-dist/tex/latex/caption/caption.sty
+Package: caption 2016/02/21 v3.3-144 Customizing captions (AR)
+
+(c:/texlive/2018/texmf-dist/tex/latex/caption/caption3.sty
+Package: caption3 2016/05/22 v1.7-166 caption3 kernel (AR)
+Package caption3 Info: TeX engine: e-TeX on input line 67.
+\captionmargin=\dimen155
+\captionmargin@=\dimen156
+\captionwidth=\dimen157
+\caption@tempdima=\dimen158
+\caption@indent=\dimen159
+\caption@parindent=\dimen160
+\caption@hangindent=\dimen161
+)
+\c@ContinuedFloat=\count275
+)
+(c:/texlive/2018/texmf-dist/tex/latex/caption/subcaption.sty
+Package: subcaption 2016/05/22 v1.1-161 Sub-captions (AR)
+\c@subfigure=\count276
+\c@subtable=\count277
+)
+(c:/texlive/2018/texmf-dist/tex/latex/caption/bicaption.sty
+Package: bicaption 2016/03/27 v1.1-158 Bilingual Captions (AR)
+\bicaption@lang=\count278
+Package bicaption Info: main language is not set.
+)
+(c:/texlive/2018/texmf-dist/tex/latex/placeins/placeins.sty
+Package: placeins 2005/04/18 v 2.2
+)
+(c:/texlive/2018/texmf-dist/tex/latex/fancyhdr/fancyhdr.sty
+Package: fancyhdr 2017/06/30 v3.9a Extensive control of page headers and footer
+s
+\f@nch@headwidth=\skip62
+\f@nch@O@elh=\skip63
+\f@nch@O@erh=\skip64
+\f@nch@O@olh=\skip65
+\f@nch@O@orh=\skip66
+\f@nch@O@elf=\skip67
+\f@nch@O@erf=\skip68
+\f@nch@O@olf=\skip69
+\f@nch@O@orf=\skip70
+)
+(c:/texlive/2018/texmf-dist/tex/latex/xcolor/xcolor.sty
+Package: xcolor 2016/05/11 v2.12 LaTeX color extensions (UK)
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics-cfg/color.cfg
+File: color.cfg 2016/01/02 v1.6 sample color configuration
+)
+Package xcolor Info: Package option `usenames' ignored on input line 216.
+Package xcolor Info: Driver file: pdftex.def on input line 225.
+
+(c:/texlive/2018/texmf-dist/tex/latex/colortbl/colortbl.sty
+Package: colortbl 2012/02/13 v1.0a Color table columns (DPC)
+
+(c:/texlive/2018/texmf-dist/tex/latex/tools/array.sty
+Package: array 2018/04/07 v2.4g Tabular extension package (FMi)
+\col@sep=\dimen162
+\ar@mcellbox=\box50
+\extrarowheight=\dimen163
+\NC@list=\toks30
+\extratabsurround=\skip71
+\backup@length=\skip72
+\ar@cellbox=\box51
+)
+\everycr=\toks31
+\minrowclearance=\skip73
+)
+\rownum=\count279
+Package xcolor Info: Model `cmy' substituted by `cmy0' on input line 1348.
+Package xcolor Info: Model `hsb' substituted by `rgb' on input line 1352.
+Package xcolor Info: Model `RGB' extended on input line 1364.
+Package xcolor Info: Model `HTML' substituted by `rgb' on input line 1366.
+Package xcolor Info: Model `Hsb' substituted by `hsb' on input line 1367.
+Package xcolor Info: Model `tHsb' substituted by `hsb' on input line 1368.
+Package xcolor Info: Model `HSB' substituted by `hsb' on input line 1369.
+Package xcolor Info: Model `Gray' substituted by `gray' on input line 1370.
+Package xcolor Info: Model `wave' substituted by `hsb' on input line 1371.
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics/dvipsnam.def
+File: dvipsnam.def 2016/06/17 v3.0m Driver-dependent file (DPC,SPQR)
+))
+(c:/texlive/2018/texmf-dist/tex/latex/tools/verbatim.sty
+Package: verbatim 2014/10/28 v1.5q LaTeX2e package for verbatim enhancements
+\every@verbatim=\toks32
+\verbatim@line=\toks33
+\verbatim@in@stream=\read2
+)
+(c:/texlive/2018/texmf-dist/tex/latex/enumitem/enumitem.sty
+Package: enumitem 2011/09/28 v3.5.2 Customized lists
+\enitkv@toks@=\toks34
+\labelindent=\skip74
+\enit@outerparindent=\dimen164
+\enit@toks=\toks35
+\enit@inbox=\box52
+\enitdp@description=\count280
+)
+(c:/texlive/2018/texmf-dist/tex/latex/listings/listings.sty
+\lst@mode=\count281
+\lst@gtempboxa=\box53
+\lst@token=\toks36
+\lst@length=\count282
+\lst@currlwidth=\dimen165
+\lst@column=\count283
+\lst@pos=\count284
+\lst@lostspace=\dimen166
+\lst@width=\dimen167
+\lst@newlines=\count285
+\lst@lineno=\count286
+\lst@maxwidth=\dimen168
+
+(c:/texlive/2018/texmf-dist/tex/latex/listings/lstmisc.sty
+File: lstmisc.sty 2015/06/04 1.6 (Carsten Heinz)
+\c@lstnumber=\count287
+\lst@skipnumbers=\count288
+\lst@framebox=\box54
+)
+(c:/texlive/2018/texmf-dist/tex/latex/listings/listings.cfg
+File: listings.cfg 2015/06/04 1.6 listings configuration
+))
+Package: listings 2015/06/04 1.6 (Carsten Heinz)
+
+(c:/texlive/2018/texmf-dist/tex/latex/algorithmicx/algpseudocode.sty
+Package: algpseudocode
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/ifthen.sty
+Package: ifthen 2014/09/29 v1.1c Standard LaTeX ifthen package (DPC)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/algorithmicx/algorithmicx.sty
+Package: algorithmicx 2005/04/27 v1.2 Algorithmicx
+
+Document Style algorithmicx 1.2 - a greatly improved `algorithmic' style
+\c@ALG@line=\count289
+\c@ALG@rem=\count290
+\c@ALG@nested=\count291
+\ALG@tlm=\skip75
+\ALG@thistlm=\skip76
+\c@ALG@Lnr=\count292
+\c@ALG@blocknr=\count293
+\c@ALG@storecount=\count294
+\c@ALG@tmpcounter=\count295
+\ALG@tmplength=\skip77
+)
+Document Style - pseudocode environments for use with the `algorithmicx' style
+) (c:/texlive/2018/texmf-dist/tex/latex/algorithms/algorithm.sty
+Package: algorithm 2009/08/24 v0.1 Document Style `algorithm' - floating enviro
+nment
+
+(c:/texlive/2018/texmf-dist/tex/latex/float/float.sty
+Package: float 2001/11/08 v1.3d Float enhancements (AL)
+\c@float@type=\count296
+\float@exts=\toks37
+\float@box=\box55
+\@float@everytoks=\toks38
+\@f \ No newline at end of file
diff --git a/Thesis.synctex(busy) b/Thesis.synctex(busy)
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Thesis.synctex(busy)
diff --git a/Thesis.tex b/Thesis.tex
new file mode 100644
index 0000000..cf0bf25
--- /dev/null
+++ b/Thesis.tex
@@ -0,0 +1,103 @@
+%---------------------------------------------------------------------------%
+%- -%
+%- LaTeX Template -%
+%- -%
+%---------------------------------------------------------------------------%
+%- Copyright (C) Huangrui Mo <[email protected]>
+%- This is free software: you can redistribute it and/or modify it
+%- under the terms of the GNU General Public License as published by
+%- the Free Software Foundation, either version 3 of the License, or
+%- (at your option) any later version.
+%---------------------------------------------------------------------------%
+%->> Document class declaration
+%---------------------------------------------------------------------------%
+\documentclass[doublesided]{Style/ucasthesis}%
+%- Multiple optional arguments:
+%- [<singlesided|doublesided|printcopy>]% set one or two sided eprint or print
+%- [fontset=<adobe|...>]% specify font set to replace automatic detection
+%- [plain]% thesis writing of international students
+%- [draftversion]% show draft version information
+%- [standard options for ctex book class: draft|paper size|font size|...]%
+%---------------------------------------------------------------------------%
+%->> Document settings
+%---------------------------------------------------------------------------%
+\usepackage[authoryear,myhdr,list]{Style/artratex}% document settings
+%- usage: \usepackage[option1,option2,...,optionN]{artratex}
+%- Multiple optional arguments:
+%- [bibtex|biber]% set bibliography processor and package
+%- [<numbers|super|authoryear|alpha>]% set citation and reference style
+%- <numbers>: textual: Jones [1]; parenthetical: [1]
+%- <super>: textual: Jones superscript [1]; parenthetical: superscript [1]
+%- <authoryear>: textual: Jones (1995); parenthetical: (Jones, 1995)
+%- <alpha>: textual: not available; parenthetical: [Jon95]
+%- [geometry]% reconfigure page layout via geometry package
+%- [lscape]% provide landscape layout environment
+%- [myhdr]% enable header and footer via fancyhdr package
+%- [color]% provide color support via xcolor package
+%- [background]% enable page background
+%- [tikz]% provide complex diagrams via tikz package
+%- [table]% provide complex tables via ctable package
+%- [list]% provide enhanced list environments for algorithm and coding
+%- [math]% enable some extra math packages
+\usepackage{Style/artracom}% user defined commands
+\usepackage{multirow}
+\usepackage{tabularx}
+\usepackage{listings}
+\lstset{
+    columns=fixed,
+    numbers=none,                                      % no line numbers shown
+    % NOTE: a duplicate numberstyle key was removed here; with the listings
+    % package, a later key silently overrides an earlier one, so only the
+    % numberstyle assignment below ever took effect.
+    frame=none,                                        % no frame around listings
+    backgroundcolor=\color[RGB]{245,245,244},          % light grey background
+    keywordstyle=\color[RGB]{40,40,255},               % keyword colour
+    numberstyle=\footnotesize\color{darkgray},         % line-number style (inert while numbers=none)
+    commentstyle=\itshape\color[RGB]{0,96,96},         % comment style; \itshape replaces deprecated \it
+    stringstyle=\rmfamily\slshape\color[RGB]{128,0,0}, % string literal style
+    showstringspaces=false,                            % do not mark spaces inside strings
+    language=c++,                                      % default listing language
+}
+%---------------------------------------------------------------------------%
+%->> Document inclusion
+%---------------------------------------------------------------------------%
+%\includeonly{Tex/Chap_1,...,Tex/Chap_N}% selected files compilation
+%---------------------------------------------------------------------------%
+%->> Document content
+%---------------------------------------------------------------------------%
+\begin{document}
+ %-
+ %-> Frontmatter: title page, abstract, content list, symbol list, preface
+ %-
+ \frontmatter% initialize the environment
+ \input{Tex/Frontmatter}% title page, abstract, dedication
+ {% content list region
+ \linespread{1.2}% local line space
+ %\intotoc{\contentsname}% add link to contents table and bookmark
+ \tableofcontents% contents catalog
+ %\intotoc{\listfigurename}% add link to contents table and bookmark
+ \listoffigures% figures catalog
+ %\intotoc{\listtablename}% add link to contents table and bookmark
+ \listoftables% tables catalog
+ }
+ %\input{Tex/Prematter}% list of symbols, preface content
+ %-
+ %-> Mainmatter
+ %-
+ \mainmatter% initialize the environment
+ \input{Tex/Mainmatter}% main content
+ %-
+ %-> Appendix
+ %-
+ \cleardoublepage%
+ \appendix% initialize the environment
+ %\input{Tex/Appendix}% appendix content
+ %-
+ %-> Backmatter: bibliography, glossary, index
+ %-
+ \backmatter% initialize the environment
+ \nocite{*}
+ \intotoc{\bibname}% add link to contents table and bookmark
+ \bibliography{Biblio/library}% bibliography
+ \input{Tex/Backmatter}% other information
+ %\bibliographystyle{unsrt}
+\end{document}
+%---------------------------------------------------------------------------% \ No newline at end of file
diff --git a/Tmp/Thesis.aux b/Tmp/Thesis.aux
new file mode 100644
index 0000000..3b252d7
--- /dev/null
+++ b/Tmp/Thesis.aux
@@ -0,0 +1,262 @@
+\relax
+\providecommand\hyper@newdestlabel[2]{}
+\bibstyle{Biblio/gbt7714-plain}
+\providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
+\HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
+\global\let\oldcontentsline\contentsline
+\gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
+\global\let\oldnewlabel\newlabel
+\gdef\newlabel#1#2{\newlabelxx{#1}#2}
+\gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
+\AtEndDocument{\ifx\hyper@anchor\@undefined
+\let\contentsline\oldcontentsline
+\let\newlabel\oldnewlabel
+\fi}
+\fi}
+\global\let\hyper@last\relax
+\gdef\HyperFirstAtBeginDocument#1{#1}
+\providecommand*\HyPL@Entry[1]{}
+\HyPL@Entry{0<</S/r>>}
+\HyPL@Entry{6<</S/R>>}
+\citation{CNNIC2018}
+\citation{PhenomenaReport}
+\citation{CiscoReport}
+\HyPL@Entry{16<</S/D>>}
+\@writefile{toc}{\contentsline {chapter}{\numberline {第1章\hspace {.3em}}绪论}{1}{chapter.1}}
+\@writefile{lof}{\addvspace {10.0pt}}
+\@writefile{lot}{\addvspace {10.0pt}}
+\newlabel{chap:introduction}{{1}{1}{绪论}{chapter.1}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {1.1}研究背景及意义}{1}{section.1.1}}
+\@writefile{lot}{\contentsline {table}{\numberline {1.1}{\ignorespaces 网民人数以及音视频用户人数\relax }}{1}{table.caption.6}}
+\providecommand*\caption@xref[2]{\@setref\relax\@undefined{#1}}
+\newlabel{tab:Numberofuser}{{1.1}{1}{网民人数以及音视频用户人数\relax }{table.caption.6}{}}
+\@writefile{lot}{\contentsline {table}{\numberline {1.2}{\ignorespaces 不同应用网络流量占比\relax }}{2}{table.caption.7}}
+\newlabel{tab:Differentapplication}{{1.2}{2}{不同应用网络流量占比\relax }{table.caption.7}{}}
+\@writefile{lot}{\contentsline {table}{\numberline {1.3}{\ignorespaces 美国下载流量排行\relax }}{2}{table.caption.8}}
+\newlabel{tab:Differentdownload}{{1.3}{2}{美国下载流量排行\relax }{table.caption.8}{}}
+\citation{Yu}
+\@writefile{lof}{\contentsline {figure}{\numberline {1.1}{\ignorespaces PACK网络冗余检测结果\relax }}{3}{figure.caption.9}}
+\newlabel{fig:youtube}{{1.1}{3}{PACK网络冗余检测结果\relax }{figure.caption.9}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {1.2}系统要求}{3}{section.1.2}}
+\citation{Spring2000}
+\citation{Zohar2014}
+\citation{Lei2013Cooperative}
+\citation{Aggarwal2010EndRE}
+\citation{Sanadhya2012}
+\citation{Halepovic2012}
+\citation{Hao2016Stochastic}
+\citation{Lumezanu2010The}
+\@writefile{toc}{\contentsline {section}{\numberline {1.3}国内外研究现状}{4}{section.1.3}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {1.3.1}研究现状概述}{4}{subsection.1.3.1}}
+\citation{Halepovic2012}
+\citation{Spring2000}
+\@writefile{toc}{\contentsline {subsection}{\numberline {1.3.2}数据链路层的网络冗余发现系统}{5}{subsection.1.3.2}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {1.3.3}网络层的网络冗余发现系统}{5}{subsection.1.3.3}}
+\citation{Yu}
+\citation{Aggarwal}
+\@writefile{lof}{\contentsline {figure}{\numberline {1.2}{\ignorespaces 基于内容分块原理\relax }}{6}{figure.caption.10}}
+\newlabel{fig:Content-based}{{1.2}{6}{基于内容分块原理\relax }{figure.caption.10}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {1.3.4}应用层的网络冗余发现系统}{6}{subsection.1.3.4}}
+\citation{Feng2013How}
+\@writefile{toc}{\contentsline {subsection}{\numberline {1.3.5}国内外研究现状小结}{7}{subsection.1.3.5}}
+\@writefile{lot}{\contentsline {table}{\numberline {1.4}{\ignorespaces 国内外研究现状小结\relax }}{8}{table.caption.11}}
+\newlabel{tab:Summaryofresearch}{{1.4}{8}{国内外研究现状小结\relax }{table.caption.11}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {1.4}论文的主要内容与组织结构}{9}{section.1.4}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {1.4.1}论文的主要内容}{9}{subsection.1.4.1}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {1.4.2}组织结构}{9}{subsection.1.4.2}}
+\@writefile{toc}{\contentsline {section}{\numberline {1.5}小结}{9}{section.1.5}}
+\citation{aoli2010}
+\@writefile{toc}{\contentsline {chapter}{\numberline {第2章\hspace {.3em}}前人的工作基础}{11}{chapter.2}}
+\@writefile{lof}{\addvspace {10.0pt}}
+\@writefile{lot}{\addvspace {10.0pt}}
+\newlabel{chap:introduction}{{2}{11}{前人的工作基础}{chapter.2}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {2.1}使用流式模糊哈希的原因及其优点}{11}{section.2.1}}
+\newlabel{eq:chufa}{{2.1}{12}{使用流式模糊哈希的原因及其优点}{equation.2.1.1}{}}
+\newlabel{eq:rollinghash}{{2.2}{12}{使用流式模糊哈希的原因及其优点}{equation.2.1.2}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {2.1}{\ignorespaces 多线程传输场景\relax }}{13}{figure.caption.12}}
+\newlabel{fig:Multi-threaded}{{2.1}{13}{多线程传输场景\relax }{figure.caption.12}{}}
+\citation{Chen2008Efficient}
+\@writefile{toc}{\contentsline {section}{\numberline {2.2}使用基于流式模糊哈希的相似性查找系统的原因及优点}{14}{section.2.2}}
+\newlabel{eq:cfds}{{2.3}{14}{使用基于流式模糊哈希的相似性查找系统的原因及优点}{equation.2.2.3}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {2.3}小结}{15}{section.2.3}}
+\@writefile{toc}{\contentsline {chapter}{\numberline {第3章\hspace {.3em}}预测性文件标识生成方法}{17}{chapter.3}}
+\@writefile{lof}{\addvspace {10.0pt}}
+\@writefile{lot}{\addvspace {10.0pt}}
+\newlabel{chap:introduction}{{3}{17}{预测性文件标识生成方法}{chapter.3}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {3.1}背景知识}{17}{section.3.1}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.1}决策树}{17}{subsection.3.1.1}}
+\@writefile{lof}{\contentsline {figure}{\numberline {3.1}{\ignorespaces 典型的决策树结构图\relax }}{17}{figure.caption.13}}
+\newlabel{fig:Typicaldecision}{{3.1}{17}{典型的决策树结构图\relax }{figure.caption.13}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.2}贝叶斯分类方法}{18}{subsection.3.1.2}}
+\newlabel{eq:phx}{{3.1}{18}{贝叶斯分类方法}{equation.3.1.1}{}}
+\newlabel{eq:pxc}{{3.2}{19}{贝叶斯分类方法}{equation.3.1.2}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.3}URL}{19}{subsection.3.1.3}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.4}HTTP缓存}{19}{subsection.3.1.4}}
+\@writefile{lof}{\contentsline {figure}{\numberline {3.2}{\ignorespaces 用户发出第一次请求\relax }}{20}{figure.caption.14}}
+\newlabel{fig:Flowchartofrequest_fi}{{3.2}{20}{用户发出第一次请求\relax }{figure.caption.14}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.5}特征选择}{20}{subsection.3.1.5}}
+\@writefile{lof}{\contentsline {figure}{\numberline {3.3}{\ignorespaces 用户再次请求\relax }}{21}{figure.caption.15}}
+\newlabel{fig:Flowchartofrequest_ag}{{3.3}{21}{用户再次请求\relax }{figure.caption.15}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {3.4}{\ignorespaces 特征选择的框架\relax }}{21}{figure.caption.16}}
+\newlabel{fig:Frameworkforfeature}{{3.4}{21}{特征选择的框架\relax }{figure.caption.16}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.6}信息熵}{22}{subsection.3.1.6}}
+\newlabel{eq:H(U)}{{3.3}{22}{信息熵}{equation.3.1.3}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.7}互信息}{22}{subsection.3.1.7}}
+\newlabel{eq:I(X;Y)1}{{3.4}{22}{互信息}{equation.3.1.4}{}}
+\newlabel{eq:I(X;Y)2)}{{3.5}{22}{互信息}{equation.3.1.5}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {3.2}重复音视频文件预测实验}{23}{section.3.2}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.1}实验思路}{23}{subsection.3.2.1}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.2}机器学习算法选择}{23}{subsection.3.2.2}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.3}实验数据}{23}{subsection.3.2.3}}
+\@writefile{lof}{\contentsline {figure}{\numberline {3.5}{\ignorespaces 所有音视频大小分布\relax }}{24}{figure.caption.17}}
+\newlabel{fig:Allaudio}{{3.5}{24}{所有音视频大小分布\relax }{figure.caption.17}{}}
+\@writefile{lot}{\contentsline {table}{\numberline {3.1}{\ignorespaces 数据集特征\relax }}{24}{table.caption.18}}
+\newlabel{tab:DatasetFeatures}{{3.1}{24}{数据集特征\relax }{table.caption.18}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {3.6}{\ignorespaces 完整度80\%音视频大小分布\relax }}{25}{figure.caption.19}}
+\newlabel{fig:80sizedistribution}{{3.6}{25}{完整度80\%音视频大小分布\relax }{figure.caption.19}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.4}数据标注}{25}{subsection.3.2.4}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.5}机器学习库}{25}{subsection.3.2.5}}
+\citation{Sahoo2017Malicious}
+\citation{Tan2018}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.6}特征量化}{26}{subsection.3.2.6}}
+\@writefile{lot}{\contentsline {table}{\numberline {3.2}{\ignorespaces 所选特征和其特征量化方式\relax }}{26}{table.caption.20}}
+\newlabel{tab:Selectedfeatures}{{3.2}{26}{所选特征和其特征量化方式\relax }{table.caption.20}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.7}实验结果}{26}{subsection.3.2.7}}
+\@writefile{toc}{\contentsline {section}{\numberline {3.3}预测性文件标识生成}{27}{section.3.3}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.1}步骤}{27}{subsection.3.3.1}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.2}特征选择评价标准}{27}{subsection.3.3.2}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.3}实验数据}{27}{subsection.3.3.3}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.4}音视频特征的信息熵}{28}{subsection.3.3.4}}
+\@writefile{lot}{\contentsline {table}{\numberline {3.3}{\ignorespaces 特征信息熵结果\relax }}{28}{table.caption.21}}
+\newlabel{tab:CharacteristicInformation}{{3.3}{28}{特征信息熵结果\relax }{table.caption.21}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.5}音视频特征的互信息}{28}{subsection.3.3.5}}
+\@writefile{lot}{\contentsline {table}{\numberline {3.4}{\ignorespaces 特征互信息结果\relax }}{29}{table.caption.22}}
+\newlabel{tab:Featuremutual}{{3.4}{29}{特征互信息结果\relax }{table.caption.22}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {3.4}小结}{29}{section.3.4}}
+\@writefile{toc}{\contentsline {chapter}{\numberline {第4章\hspace {.3em}}基于流式模糊哈希的重复音视频检测方法}{31}{chapter.4}}
+\@writefile{lof}{\addvspace {10.0pt}}
+\@writefile{lot}{\addvspace {10.0pt}}
+\newlabel{chap:introduction}{{4}{31}{基于流式模糊哈希的重复音视频检测方法}{chapter.4}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {4.1}背景知识}{31}{section.4.1}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {4.1.1}单向流}{31}{subsection.4.1.1}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {4.1.2}流式模糊哈希相似不具有传递性}{31}{subsection.4.1.2}}
+\@writefile{toc}{\contentsline {section}{\numberline {4.2}基于流式模糊哈希的重复音视频检测方法}{31}{section.4.2}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.1}步骤}{31}{subsection.4.2.1}}
+\@writefile{lof}{\contentsline {figure}{\numberline {4.1}{\ignorespaces 基于流式模糊哈希的重复音视频检测方法流程图\relax }}{32}{figure.caption.23}}
+\newlabel{fig:Flowchartofpredictiveprogram}{{4.1}{32}{基于流式模糊哈希的重复音视频检测方法流程图\relax }{figure.caption.23}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.2}音视频文件编号}{32}{subsection.4.2.2}}
+\@writefile{lof}{\contentsline {figure}{\numberline {4.2}{\ignorespaces 音视频文件编号流程图\relax }}{33}{figure.caption.24}}
+\newlabel{fig:numberflow}{{4.2}{33}{音视频文件编号流程图\relax }{figure.caption.24}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {4.2.3}预测性文件标识评价}{33}{subsection.4.2.3}}
+\@writefile{toc}{\contentsline {section}{\numberline {4.3}重复音视频检测方法的可行性验证}{33}{section.4.3}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {4.3.1}预测性文件标识的误报率}{34}{subsection.4.3.1}}
+\@writefile{lot}{\contentsline {table}{\numberline {4.1}{\ignorespaces 误报率结果\relax }}{34}{table.caption.25}}
+\newlabel{tab:Resultsoffalse}{{4.1}{34}{误报率结果\relax }{table.caption.25}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {4.3.2}预测性文件标识的漏报率}{34}{subsection.4.3.2}}
+\@writefile{lot}{\contentsline {table}{\numberline {4.2}{\ignorespaces 漏报率结果\relax }}{34}{table.caption.26}}
+\newlabel{tab:ResultsofMissing}{{4.2}{34}{漏报率结果\relax }{table.caption.26}{}}
+\@writefile{lot}{\contentsline {table}{\numberline {4.3}{\ignorespaces 漏报率结果\relax }}{35}{table.caption.27}}
+\newlabel{tab:PredictiveDocument}{{4.3}{35}{漏报率结果\relax }{table.caption.27}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {4.4}小结}{35}{section.4.4}}
+\@writefile{toc}{\contentsline {chapter}{\numberline {第5章\hspace {.3em}}系统设计与实现}{37}{chapter.5}}
+\@writefile{lof}{\addvspace {10.0pt}}
+\@writefile{lot}{\addvspace {10.0pt}}
+\newlabel{chap:introduction}{{5}{37}{系统设计与实现}{chapter.5}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {5.1}系统概述}{37}{section.5.1}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.1}{\ignorespaces 面向网络流量的重复音视频实时检测系统框架图\relax }}{37}{figure.caption.28}}
+\newlabel{fig:systemframework}{{5.1}{37}{面向网络流量的重复音视频实时检测系统框架图\relax }{figure.caption.28}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {5.2}系统测试}{37}{section.5.2}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {5.2.1}测试原理}{37}{subsection.5.2.1}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.2}{\ignorespaces 系统测试原理\relax }}{38}{figure.caption.29}}
+\newlabel{fig:test}{{5.2}{38}{系统测试原理\relax }{figure.caption.29}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {5.2.2}特定音视频文件召回率测试}{38}{subsection.5.2.2}}
+\@writefile{lot}{\contentsline {table}{\numberline {5.1}{\ignorespaces 内容分析模块特定音视频文件召回率\relax }}{38}{table.caption.30}}
+\newlabel{tab:contentanalysis}{{5.1}{38}{内容分析模块特定音视频文件召回率\relax }{table.caption.30}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {5.2.3}系统去重效果测试}{38}{subsection.5.2.3}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.3}{\ignorespaces 完整度80\%文件数去重\relax }}{39}{figure.caption.31}}
+\newlabel{fig:result_a}{{5.3}{39}{完整度80\%文件数去重\relax }{figure.caption.31}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.4}{\ignorespaces 完整度95\%文件数去重\relax }}{39}{figure.caption.32}}
+\newlabel{fig:result_b}{{5.4}{39}{完整度95\%文件数去重\relax }{figure.caption.32}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.5}{\ignorespaces 完整度80\%文件带宽去重\relax }}{40}{figure.caption.33}}
+\newlabel{fig:result_c}{{5.5}{40}{完整度80\%文件带宽去重\relax }{figure.caption.33}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.6}{\ignorespaces 完整度95\%文件带宽去重\relax }}{40}{figure.caption.34}}
+\newlabel{fig:result_d}{{5.6}{40}{完整度95\%文件带宽去重\relax }{figure.caption.34}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {5.3}小结}{41}{section.5.3}}
+\citation{Lumezanu2010The}
+\citation{Halepovic2012}
+\citation{Spring2000}
+\citation{Yu}
+\citation{Aggarwal}
+\citation{Feng2013How}
+\@writefile{toc}{\contentsline {chapter}{\numberline {第6章\hspace {.3em}}总结与展望}{43}{chapter.6}}
+\@writefile{lof}{\addvspace {10.0pt}}
+\@writefile{lot}{\addvspace {10.0pt}}
+\newlabel{chap:introduction}{{6}{43}{总结与展望}{chapter.6}{}}
+\@writefile{toc}{\contentsline {section}{\numberline {6.1}全文总结}{43}{section.6.1}}
+\@writefile{toc}{\contentsline {section}{\numberline {6.2}研究展望}{44}{section.6.2}}
+\citation{*}
+\bibdata{Biblio/library}
+\bibcite{CNNIC2018}{{1}{2018}{{CNNIC}}{{}}}
+\bibcite{aoli2010}{{2}{2010}{{敖莉\ 等}}{{敖莉, 舒继武, and 李明强}}}
+\bibcite{Aggarwal}{{3}{}{{Aggarwal {等}}}{{Aggarwal, Akella, Anand, and Balachandran}}}
+\bibcite{Aggarwal2010EndRE}{{4}{2010}{{Aggarwal {等}}}{{Aggarwal, Akella, Anand, Balachandran, Chitnis, Muthukrishnan, Ramjee, and Varghese}}}
+\bibcite{Anand2009}{{5}{2009}{{Anand {等}}}{{Anand, Muthukrishnan, Akella, and Ramjee}}}
+\bibcite{Anand2010Cheap}{{6}{2010}{{Anand {等}}}{{Anand, Muthukrishnan, Kappes, Akella, and Nath}}}
+\bibcite{Cao2017}{{7}{2017}{{Cao {等}}}{{Cao, Li, and Wijmans}}}
+\bibcite{Chen2016}{{8}{2016{a}}{{Chen {等}}}{{Chen, Weaver, and Wan}}}
+\bibcite{Chen2008Efficient}{{9}{2008}{{Chen {等}}}{{Chen, Lu, and Lu}}}
+\bibcite{Chen2016a}{{10}{2016{b}}{{Chen {等}}}{{Chen, He, Hua, and Wang}}}
+\bibcite{Chung2018}{{11}{2018}{{Chung {和} Claypool}}{{Chung and Claypool}}}
+\bibcite{CiscoReport}{{12}{2018}{{cisco}}{{}}}
+\bibcite{Feng}{{13}{}{{Feng}}{{}}}
+\bibcite{Feng2013How}{{14}{2013}{{Feng {等}}}{{Feng, Huang, Erman, Mao, Sen, and Spatscheck}}}
+\bibcite{Ge2014}{{15}{2014}{{Ge {等}}}{{Ge, Liu, Du, Zhang, Guan, Chen, Zhao, and Hu}}}
+\bibcite{Halepovic2012}{{16}{2012}{{Halepovic {等}}}{{Halepovic, Ghaderi, and Williamson}}}
+\bibcite{Halepovic2013On}{{17}{2013}{{Halepovic {等}}}{{Halepovic, Ghaderi, and Williamson}}}
+\bibcite{Hao2016Stochastic}{{18}{2016}{{Hao {等}}}{{Hao, Mu, Hong, Meng, and Goulermas}}}
+\@writefile{toc}{\contentsline {chapter}{参考文献}{47}{section*.35}}
+\bibcite{Hao2017}{{19}{2017}{{Hao {等}}}{{Hao, Mu, Hong, Wang, An, and Goulermas}}}
+\bibcite{Hua2013}{{20}{2013}{{Hua {等}}}{{Hua, Xiao, and Liu}}}
+\bibcite{Kornblum2006}{{21}{2006}{{Kornblum}}{{}}}
+\bibcite{Lei2013Cooperative}{{22}{2013}{{Lei {等}}}{{Lei, Sapra, Shen, and Lin}}}
+\bibcite{Li2017a}{{23}{2017{a}}{{Li {等}}}{{Li, Cheng, Wang, Morstatter, Trevino, Tang, and Liu}}}
+\bibcite{Li2014}{{24}{2014}{{Li {等}}}{{Li, Schliep, and Hopper}}}
+\bibcite{Li2017}{{25}{2017{b}}{{Li {等}}}{{Li, Zheng, Zhang, Li, Guo, and Xu}}}
+\bibcite{Lumezanu2010}{{26}{2010{a}}{{Lumezanu {等}}}{{Lumezanu, Guo, Spring, and Bhattacharjee}}}
+\bibcite{Lumezanu2010The}{{27}{2010{b}}{{Lumezanu {等}}}{{Lumezanu, Guo, Spring, and Bhattacharjee}}}
+\bibcite{Ma2009}{{28}{2009}{{Ma {等}}}{{Ma, Saul, Savage, and Voelker}}}
+\bibcite{Mogul2004a}{{29}{2004}{{Mogul {等}}}{{Mogul, Chan, and Kelly}}}
+\bibcite{Mohammad2013}{{30}{2013}{{Mohammad {等}}}{{Mohammad, Thabtah, and Mccluskey}}}
+\bibcite{Reed2016}{{31}{2016}{{Reed {和} Klimkowski}}{{Reed and Klimkowski}}}
+\bibcite{Reed2017}{{32}{2017}{{Reed {和} Kranch}}{{Reed and Kranch}}}
+\bibcite{Rhea2003}{{33}{2003}{{Rhea {等}}}{{Rhea, Liang, and Brewer}}}
+\bibcite{Rimmer2017}{{34}{2017}{{Rimmer {等}}}{{Rimmer, Preuveneers, Juarez, {Van Goethem}, and Joosen}}}
+\bibcite{Sahoo2017}{{35}{2017{a}}{{Sahoo {等}}}{{Sahoo, Liu, and Hoi}}}
+\bibcite{Sahoo2017Malicious}{{36}{2017{b}}{{Sahoo {等}}}{{Sahoo, Liu, and Hoi}}}
+\bibcite{Sanadhya2012}{{37}{2012}{{Sanadhya {等}}}{{Sanadhya, Sivakumar, Kim, Congdon, Lakshmanan, and Singh}}}
+\bibcite{PhenomenaReport}{{38}{2018}{{sandvine}}{{}}}
+\bibcite{Shih2004}{{39}{2004}{{Shih {和} Karger}}{{Shih and Karger}}}
+\bibcite{Song2011}{{40}{2011}{{Song {等}}}{{Song, Yang, and Huang}}}
+\bibcite{Spring2000}{{41}{2000}{{Spring {和} Wetherall}}{{Spring and Wetherall}}}
+\bibcite{Store2013}{{42}{2013}{{Store {和} Algorithm}}{{Store and Algorithm}}}
+\bibcite{Tan2018}{{43}{2018}{{Tan {等}}}{{Tan, Zhang, Liu, Liu, Zhu, and Dou}}}
+\bibcite{Tang2017}{{44}{2017}{{Tang {和} Wang}}{{Tang and Wang}}}
+\bibcite{Vastel2018a}{{45}{2018}{{Vastel {等}}}{{Vastel, Inria, Laperdrix, Rudametkin, Inria, Rouvoy, Iuf, Vastel, Laperdrix, and Rouvoy}}}
+\bibcite{Wang2014}{{46}{2014}{{Wang {等}}}{{Wang, Zhao, Hoi, and Jin}}}
+\bibcite{Wang2015}{{47}{2015}{{Wang {等}}}{{Wang, Li, Deng, Zhang, and Feng}}}
+\bibcite{Wei}{{48}{}{{Wei {等}}}{{Wei, Jiang, Zhou, and Feng}}}
+\bibcite{Winter2013}{{49}{2013}{{Winter {等}}}{{Winter, Schneider, and Yannikos}}}
+\bibcite{Wu}{{50}{}{{Wu {等}}}{{Wu, Yu, and Wolf}}}
+\bibcite{Xiao2017}{{51}{2017}{{Xiao {等}}}{{Xiao, Xue, Miao, Li, Chen, Wu, Li, and Zhou}}}
+\bibcite{Yu2004a}{{52}{2004}{{Yu {和} Liu}}{{Yu and Liu}}}
+\bibcite{Yu}{{53}{}{{Yu {等}}}{{Yu, Sapra, Shen, and Ye}}}
+\bibcite{Zhang2018}{{54}{2018}{{Zhang {等}}}{{Zhang, Jin, Ratnasamy, Wawrzynek, and Lee}}}
+\bibcite{Zhang2017}{{55}{2017}{{Zhang {等}}}{{Zhang, Ananthanarayanan, Bodik, Philipose, Bahl, and Freedman}}}
+\bibcite{zheng2018hashing}{{56}{2018}{{Zheng {等}}}{{Zheng, Li, Liu, Sun, and Fang}}}
+\bibcite{Zohar2014}{{57}{2014}{{Zohar {等}}}{{Zohar, Cidon, and Mokryn}}}
+\@writefile{toc}{\contentsline {chapter}{致谢}{51}{appendix*.37}}
+\@writefile{lof}{\addvspace {10.0pt}}
+\@writefile{lot}{\addvspace {10.0pt}}
+\@writefile{toc}{\contentsline {chapter}{作者简历及攻读学位期间发表的学术论文与研究成果}{53}{appendix*.38}}
+\@writefile{lof}{\addvspace {10.0pt}}
+\@writefile{lot}{\addvspace {10.0pt}}
diff --git a/Tmp/Thesis.bbl b/Tmp/Thesis.bbl
new file mode 100644
index 0000000..c28f557
--- /dev/null
+++ b/Tmp/Thesis.bbl
@@ -0,0 +1,401 @@
+\begin{thebibliography}{57}
+\providecommand{\natexlab}[1]{#1}
+\providecommand{\url}[1]{#1}
+\expandafter\ifx\csname urlstyle\endcsname\relax\relax\else
+ \urlstyle{same}\fi
+\providecommand{\href}[2]{\url{#2}}
+\providecommand{\doi}[1]{\href{https://doi.org/#1}{#1}}
+
+\bibitem[{CNNIC(2018)}]{CNNIC2018}
+CNNIC.
+\newblock 中国互联网络发展状况统计报告\allowbreak[R/OL].
+\newblock 2018.
+\newblock \url{http://www.cac.gov.cn/2018-08/20/c\_1123296882.htm}.
+
+\bibitem[{敖莉\ 等(2010)敖莉, 舒继武, and 李明强}]{aoli2010}
+敖莉, 舒继武, 李明强.
+\newblock 重复数据删除技术\allowbreak[J].
+\newblock 软件学报, 2010, 21\penalty0 (5):\penalty0 916-929.
+
+\bibitem[{Aggarwal {\bibetal}()Aggarwal, Akella, Anand, and
+ Balachandran}]{Aggarwal}
+AGGARWAL~B, AKELLA~A, ANAND~A, et~al.
+\newblock {EndRE : An End-System Redundancy Elimination Service for
+ Enterprises}\allowbreak[J].
+
+\bibitem[{Aggarwal {\bibetal}(2010)Aggarwal, Akella, Anand, Balachandran,
+ Chitnis, Muthukrishnan, Ramjee, and Varghese}]{Aggarwal2010EndRE}
+AGGARWAL~B, AKELLA~A, ANAND~A, et~al.
+\newblock Endre: An end-system redundancy elimination service for
+ enterprises.\allowbreak[J].
+\newblock 2010.
+
+\bibitem[{Anand {\bibetal}(2009)Anand, Muthukrishnan, Akella, and
+ Ramjee}]{Anand2009}
+ANAND~A, MUTHUKRISHNAN~C, AKELLA~A, et~al.
+\newblock {Redundancy in network traffic: findings and
+ implications}\allowbreak[J/OL].
+\newblock Proceedings of SIGMETRICS/Performance'09, 2009:\penalty0 37-48.
+\newblock \url{http://portal.acm.org/citation.cfm?id=1555355}.
+
+\bibitem[{Anand {\bibetal}(2010)Anand, Muthukrishnan, Kappes, Akella, and
+ Nath}]{Anand2010Cheap}
+ANAND~A, MUTHUKRISHNAN~C, KAPPES~S, et~al.
+\newblock Cheap and large cams for high performance data-intensive networked
+ systems\allowbreak[J].
+\newblock 2010.
+
+\bibitem[{Cao {\bibetal}(2017)Cao, Li, and Wijmans}]{Cao2017}
+CAO~Y, LI~S, WIJMANS~E.
+\newblock {( Cross- ) Browser Fingerprinting via OS and Hardware Level
+ Features}\allowbreak[J].
+\newblock Proceedings of Network {\&} Distributed System Security Symposium
+ (NDSS), 2017\penalty0 (March).
+
+\bibitem[{Chen {\bibetal}(2016{\natexlab{a}})Chen, Weaver, and Wan}]{Chen2016}
+CHEN~J, WEAVER~N, WAN~T.
+\newblock {Host of Troubles : Multiple Host Ambiguities in HTTP
+ Implementations}\allowbreak[J].
+\newblock Ccs, 2016:\penalty0 1516-1527.
+
+\bibitem[{Chen {\bibetal}(2008)Chen, Lu, and Lu}]{Chen2008Efficient}
+CHEN~L, LU~J, LU~Y.
+\newblock Efficient merging and filtering algorithms for approximate string
+ searches\allowbreak[C]//\allowbreak
+IEEE International Conference on Data Engineering.
+\newblock 2008.
+
+\bibitem[{Chen {\bibetal}(2016{\natexlab{b}})Chen, He, Hua, and
+ Wang}]{Chen2016a}
+CHEN~Y, HE~W, HUA~Y, et~al.
+\newblock {CompoundEyes: Near-duplicate detection in large scale online video
+ systems in the cloud}\allowbreak[J].
+\newblock Proceedings - IEEE INFOCOM, 2016, 2016-July.
+
+\bibitem[{Chung {\biband} Claypool(2018)Chung and Claypool}]{Chung2018}
+CHUNG~J~W, CLAYPOOL~M.
+\newblock {Silhoue e – Identifying YouTube Video Flows from Encrypted Tra
+ ic}\allowbreak[J].
+\newblock Nossdav 2018, 2018.
+
+\bibitem[{cisco(2018)}]{CiscoReport}
+CISCO.
+\newblock visual-networking-index-vni\allowbreak[R/OL].
+\newblock 2018.
+\newblock
+ \url{https://www.cisco.com/c/en/us/solutions/collateral/service-provider/visual-networking-index-vni/white-paper-c11-741490.html}.
+
+\bibitem[{Feng()}]{Feng}
+FENG~D.
+\newblock {Smart In-Network Deduplication for Storage-aware SDN}\allowbreak[J].
+\newblock :\penalty0 509-510.
+
+\bibitem[{Feng {\bibetal}(2013)Feng, Huang, Erman, Mao, Sen, and
+ Spatscheck}]{Feng2013How}
+FENG~Q, HUANG~J, ERMAN~J, et~al.
+\newblock How to reduce smartphone traffic volume by 30%?\allowbreak[J].
+\newblock 2013.
+
+\bibitem[{Ge {\bibetal}(2014)Ge, Liu, Du, Zhang, Guan, Chen, Zhao, and
+ Hu}]{Ge2014}
+GE~X, LIU~Y, DU~D~H, et~al.
+\newblock {OpenANFV}\allowbreak[J/OL].
+\newblock Proceedings of the 2014 ACM conference on SIGCOMM - SIGCOMM '14,
+ 2014:\penalty0 353-354.
+\newblock \url{http://dl.acm.org/citation.cfm?doid=2619239.2631426}.
+
+\bibitem[{Halepovic {\bibetal}(2012)Halepovic, Ghaderi, and
+ Williamson}]{Halepovic2012}
+HALEPOVIC~E, GHADERI~M, WILLIAMSON~C.
+\newblock {On the performance of redundant traffic elimination in
+ WLANs}\allowbreak[J].
+\newblock IEEE International Conference on Communications, 2012:\penalty0
+ 5434-5439.
+
+\bibitem[{Halepovic {\bibetal}(2013)Halepovic, Ghaderi, and
+ Williamson}]{Halepovic2013On}
+HALEPOVIC~E, GHADERI~M, WILLIAMSON~C.
+\newblock On the performance of redundant traffic elimination in
+ wlans\allowbreak[C]//\allowbreak
+IEEE International Conference on Communications.
+\newblock 2013.
+
+\bibitem[{Hao {\bibetal}(2016)Hao, Mu, Hong, Meng, and
+ Goulermas}]{Hao2016Stochastic}
+HAO~Y, MU~T, HONG~R, et~al.
+\newblock Stochastic multiview hashing for large-scale near-duplicate video
+ retrieval\allowbreak[J].
+\newblock IEEE Transactions on Multimedia, 2016, PP\penalty0 (99):\penalty0
+ 1-1.
+
+\bibitem[{Hao {\bibetal}(2017)Hao, Mu, Hong, Wang, An, and Goulermas}]{Hao2017}
+HAO~Y, MU~T, HONG~R, et~al.
+\newblock {Stochastic Multiview Hashing for Large-Scale Near-Duplicate Video
+ Retrieval}\allowbreak[J].
+\newblock IEEE Transactions on Multimedia, 2017, 19\penalty0 (1):\penalty0
+ 1-14.
+
+\bibitem[{Hua {\bibetal}(2013)Hua, Xiao, and Liu}]{Hua2013}
+HUA~Y, XIAO~B, LIU~X.
+\newblock {NEST: Locality-aware approximate query service for cloud
+ computing}\allowbreak[J].
+\newblock Proceedings - IEEE INFOCOM, 2013:\penalty0 1303-1311.
+
+\bibitem[{Kornblum(2006)}]{Kornblum2006}
+KORNBLUM~J.
+\newblock {Identifying almost identical files using context triggered piecewise
+ hashing}\allowbreak[J].
+\newblock Digital Investigation, 2006, 3\penalty0 (SUPPL.):\penalty0 91-97.
+
+\bibitem[{Lei {\bibetal}(2013)Lei, Sapra, Shen, and Lin}]{Lei2013Cooperative}
+LEI~Y, SAPRA~K, SHEN~H, et~al.
+\newblock Cooperative end-to-end traffic redundancy elimination for reducing
+ cloud bandwidth cost\allowbreak[J].
+\newblock 2013.
+
+\bibitem[{Li {\bibetal}(2017{\natexlab{a}})Li, Cheng, Wang, Morstatter,
+ Trevino, Tang, and Liu}]{Li2017a}
+LI~J, CHENG~K, WANG~S, et~al.
+\newblock {Feature Selection: A Data Perspective}\allowbreak[J].
+\newblock ACM Computing Surveys, 2017, 50\penalty0 (6):\penalty0 1-45.
+
+\bibitem[{Li {\bibetal}(2014)Li, Schliep, and Hopper}]{Li2014}
+LI~S, SCHLIEP~M, HOPPER~N.
+\newblock {Facet: Streaming over Videoconferencing for Censorship
+ Circumvention.}\allowbreak[J/OL].
+\newblock Wpes, 2014:\penalty0 163-172.
+\newblock
+ \url{http://dl.acm.org/citation.cfm?doid=2665943.2665944{\%}0Ahttp://doi.acm.org/10.1145/2665943.2665944{\%}5Cnpapers3://publication/doi/10.1145/2665943.2665944}.
+
+\bibitem[{Li {\bibetal}(2017{\natexlab{b}})Li, Zheng, Zhang, Li, Guo, and
+ Xu}]{Li2017}
+LI~X, ZHENG~C, ZHANG~C, et~al.
+\newblock {AppTwins: A new approach to identify app package in network
+ traffic}\allowbreak[J].
+\newblock 2017 8th International Conference on Information and Communication
+ Systems, ICICS 2017, 2017:\penalty0 222-227.
+
+\bibitem[{Lumezanu {\bibetal}(2010{\natexlab{a}})Lumezanu, Guo, Spring, and
+ Bhattacharjee}]{Lumezanu2010}
+LUMEZANU~C, GUO~K, SPRING~N, et~al.
+\newblock {The effect of packet loss on redundancy elimination in cellular
+ wireless networks}\allowbreak[J/OL].
+\newblock Proceedings of ACM IMC, 2010\penalty0 (January):\penalty0 294.
+\newblock \url{http://dl.acm.org/citation.cfm?id=1879141.1879179}.
+
+\bibitem[{Lumezanu {\bibetal}(2010{\natexlab{b}})Lumezanu, Guo, Spring, and
+ Bhattacharjee}]{Lumezanu2010The}
+LUMEZANU~C, GUO~K, SPRING~N, et~al.
+\newblock The effect of packet loss on redundancy elimination in cellular
+ wireless networks\allowbreak[J].
+\newblock 2010.
+
+\bibitem[{Ma {\bibetal}(2009)Ma, Saul, Savage, and Voelker}]{Ma2009}
+MA~J, SAUL~L~K, SAVAGE~S, et~al.
+\newblock {Beyond Blacklists : Learning to Detect Malicious Web Sites from
+ Suspicious URLs}\allowbreak[J/OL].
+\newblock World Wide Web Internet And Web Information Systems, 2009:\penalty0
+ 1245-1253.
+\newblock \url{http://portal.acm.org/citation.cfm?id=1557153}.
+
+\bibitem[{Mogul {\bibetal}(2004)Mogul, Chan, and Kelly}]{Mogul2004a}
+MOGUL~J~C, CHAN~Y~M, KELLY~T.
+\newblock {Design, implementation, and evaluation of duplicate transfer
+ detection in HTTP}\allowbreak[J].
+\newblock Proceedings of USENIX NSDI, 2004\penalty0 (March):\penalty0 4.
+
+\bibitem[{Mohammad {\bibetal}(2013)Mohammad, Thabtah, and
+ Mccluskey}]{Mohammad2013}
+MOHAMMAD~R~M, THABTAH~F, MCCLUSKEY~L.
+\newblock {Phishing Websites Features}\allowbreak[J/OL].
+\newblock Ieee, 2013\penalty0 (May):\penalty0 1-7.
+\newblock
+ \url{papers3://publication/uuid/6A553382-D05D-48FA-97AA-382C3203BB1F}.
+
+\bibitem[{Reed {\biband} Klimkowski(2016)Reed and Klimkowski}]{Reed2016}
+REED~A, KLIMKOWSKI~B.
+\newblock {Leaky Streams}\allowbreak[J].
+\newblock 2016:\penalty0 1114-1119.
+
+\bibitem[{Reed {\biband} Kranch(2017)Reed and Kranch}]{Reed2017}
+REED~A, KRANCH~M.
+\newblock {Identifying HTTPS-Protected Netflix Videos in
+ Real-Time}\allowbreak[J/OL].
+\newblock Proceedings of the Seventh ACM on Conference on Data and Application
+ Security and Privacy - CODASPY '17, 2017:\penalty0 361-368.
+\newblock \url{http://dl.acm.org/citation.cfm?doid=3029806.3029821}.
+
+\bibitem[{Rhea {\bibetal}(2003)Rhea, Liang, and Brewer}]{Rhea2003}
+RHEA~S~C, LIANG~K, BREWER~E.
+\newblock {Value-Based Web Caching}\allowbreak[J].
+\newblock 2003\penalty0 (May).
+
+\bibitem[{Rimmer {\bibetal}(2017)Rimmer, Preuveneers, Juarez, {Van Goethem},
+ and Joosen}]{Rimmer2017}
+RIMMER~V, PREUVENEERS~D, JUAREZ~M, et~al.
+\newblock {Automated Website Fingerprinting through Deep
+ Learning}\allowbreak[J/OL].
+\newblock 2017.
+\newblock
+ \url{http://arxiv.org/abs/1708.06376{\%}0Ahttp://dx.doi.org/10.14722/ndss.2018.23105}.
+
+\bibitem[{Sahoo {\bibetal}(2017{\natexlab{a}})Sahoo, Liu, and Hoi}]{Sahoo2017}
+SAHOO~D, LIU~C, HOI~S~C~H.
+\newblock {Malicious URL Detection using Machine Learning: A
+ Survey}\allowbreak[J/OL].
+\newblock 2017:\penalty0 1-21.
+\newblock \url{http://arxiv.org/abs/1701.07179}.
+
+\bibitem[{Sahoo {\bibetal}(2017{\natexlab{b}})Sahoo, Liu, and
+ Hoi}]{Sahoo2017Malicious}
+SAHOO~D, LIU~C, HOI~S~C~H.
+\newblock Malicious url detection using machine learning: A
+ survey\allowbreak[J].
+\newblock 2017.
+
+\bibitem[{Sanadhya {\bibetal}(2012)Sanadhya, Sivakumar, Kim, Congdon,
+ Lakshmanan, and Singh}]{Sanadhya2012}
+SANADHYA~S, SIVAKUMAR~R, KIM~K~H, et~al.
+\newblock {Asymmetric Caching: Improved Network Deduplication for Mobile
+ Devices}\allowbreak[J/OL].
+\newblock Mobicom, 2012:\penalty0 161.
+\newblock
+ \url{http://dl.acm.org/citation.cfm?id=2348565{\%}5Cnhttp://dl.acm.org/citation.cfm?doid=2348543.2348565}.
+
+\bibitem[{sandvine(2018)}]{PhenomenaReport}
+SANDVINE.
+\newblock 2018-internet-phenomena-report\allowbreak[R/OL].
+\newblock 2018.
+\newblock \url{https://www.sandvine.com/2018-internet-phenomena-report}.
+
+\bibitem[{Shih {\biband} Karger(2004)Shih and Karger}]{Shih2004}
+SHIH~L~K, KARGER~D~R.
+\newblock {Using urls and table layout for web classification
+ tasks}\allowbreak[J/OL].
+\newblock Proceedings of the 13th conference on World Wide Web - WWW '04,
+ 2004:\penalty0 193.
+\newblock \url{http://portal.acm.org/citation.cfm?doid=988672.988699}.
+
+\bibitem[{Song {\bibetal}(2011)Song, Yang, and Huang}]{Song2011}
+SONG~J, YANG~Y, HUANG~Z.
+\newblock {Multiple feature hashing for real-time large scale near-duplicate
+ video retrieval}\allowbreak[J/OL].
+\newblock Proceedings of the 19th {\ldots}, 2011:\penalty0 423-432.
+\newblock \url{http://dl.acm.org/citation.cfm?id=2072354}.
+
+\bibitem[{Spring {\biband} Wetherall(2000)Spring and Wetherall}]{Spring2000}
+SPRING~N~T, WETHERALL~D.
+\newblock {A protocol-independent technique for eliminating redundant network
+ traffic}\allowbreak[J].
+\newblock Sigcomm, 2000, 30\penalty0 (4):\penalty0 87-95.
+
+\bibitem[{Store {\biband} Algorithm(2013)Store and Algorithm}]{Store2013}
+STORE~A~R~C, ALGORITHM~B~R.
+\newblock {Advanced Prediction-Based System for Cloud Bandwidth and Cost
+ Reduction}\allowbreak[J].
+\newblock 2013, 8491:\penalty0 224-226.
+
+\bibitem[{Tan {\bibetal}(2018)Tan, Zhang, Liu, Liu, Zhu, and Dou}]{Tan2018}
+TAN~G, ZHANG~P, LIU~Q, et~al.
+\newblock {Adaptive Malicious URL Detection: Learning in the Presence of
+ Concept Drifts}\allowbreak[J].
+\newblock Proceedings - 17th IEEE International Conference on Trust, Security
+ and Privacy in Computing and Communications and 12th IEEE International
+ Conference on Big Data Science and Engineering, Trustcom/BigDataSE 2018,
+ 2018:\penalty0 737-743.
+
+\bibitem[{Tang {\biband} Wang(2017)Tang and Wang}]{Tang2017}
+TANG~P, WANG~Z.
+\newblock {Classification of Internet Video Traffic Using}\allowbreak[J].
+\newblock 2017\penalty0 (978).
+
+\bibitem[{Vastel {\bibetal}(2018)Vastel, Inria, Laperdrix, Rudametkin, Inria,
+ Rouvoy, Iuf, Vastel, Laperdrix, and Rouvoy}]{Vastel2018a}
+VASTEL~A, INRIA~I, LAPERDRIX~P, et~al.
+\newblock {Fp-Scanner : The Privacy Implications of Browser Fingerprint
+ Inconsistencies}\allowbreak[J].
+\newblock USENIX Security, 2018.
+
+\bibitem[{Wang {\bibetal}(2014)Wang, Zhao, Hoi, and Jin}]{Wang2014}
+WANG~J, ZHAO~P, HOI~S~C, et~al.
+\newblock {Online feature selection and its applications}\allowbreak[J].
+\newblock IEEE Transactions on Knowledge and Data Engineering, 2014,
+ 26\penalty0 (3):\penalty0 698-710.
+
+\bibitem[{Wang {\bibetal}(2015)Wang, Li, Deng, Zhang, and Feng}]{Wang2015}
+WANG~J, LI~G, DENG~D, et~al.
+\newblock {Two birds with one stone: An efficient hierarchical framework for
+ top-k and threshold-based string similarity search}\allowbreak[J].
+\newblock Proceedings - International Conference on Data Engineering, 2015,
+ 2015-May:\penalty0 519-530.
+
+\bibitem[{Wei {\bibetal}()Wei, Jiang, Zhou, and Feng}]{Wei}
+WEI~J, JIANG~H, ZHOU~K, et~al.
+\newblock {MAD2 : A Scalable High-Throughput Exact Deduplication Approach for
+ Network Backup Services}\allowbreak[J].
+
+\bibitem[{Winter {\bibetal}(2013)Winter, Schneider, and Yannikos}]{Winter2013}
+WINTER~C, SCHNEIDER~M, YANNIKOS~Y.
+\newblock {F2S2: Fast forensic similarity search through indexing piecewise
+ hash signatures}\allowbreak[J/OL].
+\newblock Digital Investigation, 2013, 10\penalty0 (4):\penalty0 361-371.
+\newblock \url{http://dx.doi.org/10.1016/j.diin.2013.08.003}.
+
+\bibitem[{Wu {\bibetal}()Wu, Yu, and Wolf}]{Wu}
+WU~K~L, YU~P~S, WOLF~J~L.
+\newblock {Segment-Based Proxy Caching of Multimedia Streams}\allowbreak[J].
+
+\bibitem[{Xiao {\bibetal}(2017)Xiao, Xue, Miao, Li, Chen, Wu, Li, and
+ Zhou}]{Xiao2017}
+XIAO~W, XUE~J, MIAO~Y, et~al.
+\newblock {Tux2: Distributed Graph Computation for Machine
+ Learning}\allowbreak[J/OL].
+\newblock Nsdi, 2017:\penalty0 669-682.
+\newblock
+ \url{https://www.usenix.org/conference/nsdi17/technical-sessions/presentation/xiao}.
+
+\bibitem[{Yu {\biband} Liu(2004)Yu and Liu}]{Yu2004a}
+YU~L, LIU~H.
+\newblock {Efficient Feature Selection via Analysis of Relevance and
+ Redundancy}\allowbreak[J/OL].
+\newblock J. Mach. Learn. Res., 2004, 5:\penalty0 1205-1224.
+\newblock \url{http://dl.acm.org/citation.cfm?id=1005332.1044700}.
+
+\bibitem[{Yu {\bibetal}()Yu, Sapra, Shen, and Ye}]{Yu}
+YU~L, SAPRA~K, SHEN~H, et~al.
+\newblock {Cooperative End-to-End Tra ffi c Redundancy Elimination for Reducing
+ Cloud Bandwidth Cost}\allowbreak[J].
+
+\bibitem[{Zhang {\bibetal}(2018)Zhang, Jin, Ratnasamy, Wawrzynek, and
+ Lee}]{Zhang2018}
+ZHANG~B, JIN~X, RATNASAMY~S, et~al.
+\newblock {AWStream}\allowbreak[J/OL].
+\newblock Proceedings of the 2018 Conference of the ACM Special Interest Group
+ on Data Communication - SIGCOMM '18, 2018:\penalty0 236-252.
+\newblock \url{http://dl.acm.org/citation.cfm?doid=3230543.3230554}.
+
+\bibitem[{Zhang {\bibetal}(2017)Zhang, Ananthanarayanan, Bodik, Philipose,
+ Bahl, and Freedman}]{Zhang2017}
+ZHANG~H, ANANTHANARAYANAN~G, BODIK~P, et~al.
+\newblock {Live video analytics at scale with approximation and
+ delay-tolerance}\allowbreak[J/OL].
+\newblock Proceedings of the 14th USENIX Conference on Networked Systems Design
+ and Implementation, 2017:\penalty0 377-392.
+\newblock \url{https://dl.acm.org/citation.cfm?id=3154661}.
+
+\bibitem[{Zheng {\bibetal}(2018)Zheng, Li, Liu, Sun, and
+ Fang}]{zheng2018hashing}
+ZHENG~C, LI~X, LIU~Q, et~al.
+\newblock Hashing incomplete and unordered network
+ streams\allowbreak[C]//\allowbreak
+IFIP International Conference on Digital Forensics.
+\newblock Springer, 2018: 199-224.
+
+\bibitem[{Zohar {\bibetal}(2014)Zohar, Cidon, and Mokryn}]{Zohar2014}
+ZOHAR~E, CIDON~I, MOKRYN~O.
+\newblock {PACK: Prediction-based cloud bandwidth and cost reduction
+ system}\allowbreak[J].
+\newblock IEEE/ACM Transactions on Networking, 2014, 22\penalty0 (1):\penalty0
+ 39-51.
+
+\end{thebibliography}
diff --git a/Tmp/Thesis.blg b/Tmp/Thesis.blg
new file mode 100644
index 0000000..0500e1b
--- /dev/null
+++ b/Tmp/Thesis.blg
@@ -0,0 +1,82 @@
+This is BibTeX, Version 0.99d (TeX Live 2018/W32TeX)
+Capacity: max_strings=100000, hash_size=100000, hash_prime=85009
+The top-level auxiliary file: ./Tmp/Thesis.aux
+The style file: Biblio/gbt7714-plain.bst
+Reallocated singl_function (elt_size=4) to 100 items from 50.
+Reallocated wiz_functions (elt_size=4) to 6000 items from 3000.
+Database file #1: Biblio/library.bib
+Repeated entry---line 494 of file Biblio/library.bib
+ : @article{Tan2018
+ : ,
+I'm skipping whatever remains of this entry
+Warning--empty key in CNNIC2018
+Warning--empty year in Yu
+Warning--empty year in Aggarwal
+Warning--empty key in aoli2010
+Warning--empty year in Feng
+Warning--empty year in Wu
+Warning--empty year in Wei
+Warning--empty journal in Aggarwal
+Warning--empty year in Aggarwal
+Warning--empty journal in Aggarwal2010EndRE
+Warning--empty journal in Anand2010Cheap
+Warning--empty journal in Feng
+Warning--empty year in Feng
+Warning--empty journal in Feng2013How
+Warning--empty journal in Lei2013Cooperative
+Warning--empty journal in Lumezanu2010The
+Warning--empty journal in Reed2016
+Warning--empty journal in Rhea2003
+Warning--empty journal in Rimmer2017
+Warning--empty journal in Sahoo2017
+Warning--empty journal in Sahoo2017Malicious
+Warning--empty journal in Store2013
+Warning--empty journal in Tang2017
+Warning--empty journal in Wei
+Warning--empty year in Wei
+Warning--empty journal in Wu
+Warning--empty year in Wu
+Warning--empty journal in Yu
+Warning--empty year in Yu
+You've used 57 entries,
+ 4429 wiz_defined-function locations,
+ 1263 strings with 17945 characters,
+and the built_in function-call counts, 335337 in all, are:
+= -- 4595
+> -- 39617
+< -- 51023
++ -- 14048
+- -- 673
+* -- 2899
+:= -- 46386
+add.period$ -- 187
+call.type$ -- 57
+change.case$ -- 538
+chr.to.int$ -- 12653
+cite$ -- 143
+duplicate$ -- 1307
+empty$ -- 2988
+format.name$ -- 1270
+if$ -- 86198
+int.to.chr$ -- 5
+int.to.str$ -- 1
+missing$ -- 0
+newline$ -- 313
+num.names$ -- 277
+pop$ -- 18279
+preamble$ -- 1
+purify$ -- 333
+quote$ -- 0
+skip$ -- 34489
+stack$ -- 0
+substring$ -- 13775
+swap$ -- 131
+text.length$ -- 549
+text.prefix$ -- 0
+top$ -- 0
+type$ -- 963
+warning$ -- 29
+while$ -- 786
+width$ -- 0
+write$ -- 824
+(There was 1 error message)
diff --git a/Tmp/Thesis.lof b/Tmp/Thesis.lof
new file mode 100644
index 0000000..c56ed17
--- /dev/null
+++ b/Tmp/Thesis.lof
@@ -0,0 +1,25 @@
+\addvspace {10.0pt}
+\contentsline {figure}{\numberline {1.1}{\ignorespaces PACK网络冗余检测结果\relax }}{3}{figure.caption.9}
+\contentsline {figure}{\numberline {1.2}{\ignorespaces 基于内容分块原理\relax }}{6}{figure.caption.10}
+\addvspace {10.0pt}
+\contentsline {figure}{\numberline {2.1}{\ignorespaces 多线程传输场景\relax }}{13}{figure.caption.12}
+\addvspace {10.0pt}
+\contentsline {figure}{\numberline {3.1}{\ignorespaces 典型的决策树结构图\relax }}{17}{figure.caption.13}
+\contentsline {figure}{\numberline {3.2}{\ignorespaces 用户发出第一次请求\relax }}{20}{figure.caption.14}
+\contentsline {figure}{\numberline {3.3}{\ignorespaces 用户再次请求\relax }}{21}{figure.caption.15}
+\contentsline {figure}{\numberline {3.4}{\ignorespaces 特征选择的框架\relax }}{21}{figure.caption.16}
+\contentsline {figure}{\numberline {3.5}{\ignorespaces 所有音视频大小分布\relax }}{24}{figure.caption.17}
+\contentsline {figure}{\numberline {3.6}{\ignorespaces 完整度80\%音视频大小分布\relax }}{25}{figure.caption.19}
+\addvspace {10.0pt}
+\contentsline {figure}{\numberline {4.1}{\ignorespaces 基于流式模糊哈希的重复音视频检测方法流程图\relax }}{32}{figure.caption.23}
+\contentsline {figure}{\numberline {4.2}{\ignorespaces 音视频文件编号流程图\relax }}{33}{figure.caption.24}
+\addvspace {10.0pt}
+\contentsline {figure}{\numberline {5.1}{\ignorespaces 面向网络流量的重复音视频实时检测系统框架图\relax }}{37}{figure.caption.28}
+\contentsline {figure}{\numberline {5.2}{\ignorespaces 系统测试原理\relax }}{38}{figure.caption.29}
+\contentsline {figure}{\numberline {5.3}{\ignorespaces 完整度80\%文件数去重\relax }}{39}{figure.caption.31}
+\contentsline {figure}{\numberline {5.4}{\ignorespaces 完整度95\%文件数去重\relax }}{39}{figure.caption.32}
+\contentsline {figure}{\numberline {5.5}{\ignorespaces 完整度80\%文件带宽去重\relax }}{40}{figure.caption.33}
+\contentsline {figure}{\numberline {5.6}{\ignorespaces 完整度95\%文件带宽去重\relax }}{40}{figure.caption.34}
+\addvspace {10.0pt}
+\addvspace {10.0pt}
+\addvspace {10.0pt}
diff --git a/Tmp/Thesis.log b/Tmp/Thesis.log
new file mode 100644
index 0000000..ec3d1f1
--- /dev/null
+++ b/Tmp/Thesis.log
@@ -0,0 +1,2167 @@
+This is XeTeX, Version 3.14159265-2.6-0.99999 (TeX Live 2018/W32TeX) (preloaded format=xelatex 2019.3.18) 17 MAY 2019 16:09
+entering extended mode
+ restricted \write18 enabled.
+ %&-line parsing enabled.
+**Thesis
+(./Thesis.tex
+LaTeX2e <2018-04-01> patch level 2
+Babel <3.18> and hyphenation patterns for 84 language(s) loaded.
+(./Style/ucasthesis.cls
+
+LaTeX Warning: You have requested document class `Style/ucasthesis',
+ but the document class provides `ucasthesis'.
+
+Document Class: ucasthesis 2014/10/01 v1.0 LaTeX document class
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/ctexbook.cls
+(c:/texlive/2018/texmf-dist/tex/latex/l3kernel/expl3.sty
+Package: expl3 2018/03/05 L3 programming layer (loader)
+
+(c:/texlive/2018/texmf-dist/tex/latex/l3kernel/expl3-code.tex
+Package: expl3 2018/03/05 L3 programming layer (code)
+\c_max_int=\count80
+\l_tmpa_int=\count81
+\l_tmpb_int=\count82
+\g_tmpa_int=\count83
+\g_tmpb_int=\count84
+\g__intarray_font_int=\count85
+\g__prg_map_int=\count86
+\c_log_iow=\count87
+\l_iow_line_count_int=\count88
+\l__iow_line_target_int=\count89
+\l__iow_one_indent_int=\count90
+\l__iow_indent_int=\count91
+\c_zero_dim=\dimen102
+\c_max_dim=\dimen103
+\l_tmpa_dim=\dimen104
+\l_tmpb_dim=\dimen105
+\g_tmpa_dim=\dimen106
+\g_tmpb_dim=\dimen107
+\c_zero_skip=\skip41
+\c_max_skip=\skip42
+\l_tmpa_skip=\skip43
+\l_tmpb_skip=\skip44
+\g_tmpa_skip=\skip45
+\g_tmpb_skip=\skip46
+\c_zero_muskip=\muskip10
+\c_max_muskip=\muskip11
+\l_tmpa_muskip=\muskip12
+\l_tmpb_muskip=\muskip13
+\g_tmpa_muskip=\muskip14
+\g_tmpb_muskip=\muskip15
+\l_keys_choice_int=\count92
+\c__fp_leading_shift_int=\count93
+\c__fp_middle_shift_int=\count94
+\c__fp_trailing_shift_int=\count95
+\c__fp_big_leading_shift_int=\count96
+\c__fp_big_middle_shift_int=\count97
+\c__fp_big_trailing_shift_int=\count98
+\c__fp_Bigg_leading_shift_int=\count99
+\c__fp_Bigg_middle_shift_int=\count100
+\c__fp_Bigg_trailing_shift_int=\count101
+\l__sort_length_int=\count102
+\l__sort_min_int=\count103
+\l__sort_top_int=\count104
+\l__sort_max_int=\count105
+\l__sort_true_max_int=\count106
+\l__sort_block_int=\count107
+\l__sort_begin_int=\count108
+\l__sort_end_int=\count109
+\l__sort_A_int=\count110
+\l__sort_B_int=\count111
+\l__sort_C_int=\count112
+\l__tl_build_start_index_int=\count113
+\l__tl_build_index_int=\count114
+\l__tl_analysis_normal_int=\count115
+\l__tl_analysis_index_int=\count116
+\l__tl_analysis_nesting_int=\count117
+\l__tl_analysis_type_int=\count118
+\l__regex_internal_a_int=\count119
+\l__regex_internal_b_int=\count120
+\l__regex_internal_c_int=\count121
+\l__regex_balance_int=\count122
+\l__regex_group_level_int=\count123
+\l__regex_mode_int=\count124
+\c__regex_cs_in_class_mode_int=\count125
+\c__regex_cs_mode_int=\count126
+\l__regex_catcodes_int=\count127
+\l__regex_default_catcodes_int=\count128
+\c__regex_catcode_L_int=\count129
+\c__regex_catcode_O_int=\count130
+\c__regex_catcode_A_int=\count131
+\c__regex_all_catcodes_int=\count132
+\l__regex_show_lines_int=\count133
+\l__regex_min_state_int=\count134
+\l__regex_max_state_int=\count135
+\l__regex_left_state_int=\count136
+\l__regex_right_state_int=\count137
+\l__regex_capturing_group_int=\count138
+\l__regex_min_pos_int=\count139
+\l__regex_max_pos_int=\count140
+\l__regex_curr_pos_int=\count141
+\l__regex_start_pos_int=\count142
+\l__regex_success_pos_int=\count143
+\l__regex_curr_char_int=\count144
+\l__regex_curr_catcode_int=\count145
+\l__regex_last_char_int=\count146
+\l__regex_case_changed_char_int=\count147
+\l__regex_curr_state_int=\count148
+\l__regex_step_int=\count149
+\l__regex_min_active_int=\count150
+\l__regex_max_active_int=\count151
+\l__regex_replacement_csnames_int=\count152
+\l__regex_match_count_int=\count153
+\l__regex_min_submatch_int=\count154
+\l__regex_submatch_int=\count155
+\l__regex_zeroth_submatch_int=\count156
+\g__regex_trace_regex_int=\count157
+\c_empty_box=\box26
+\l_tmpa_box=\box27
+\l_tmpb_box=\box28
+\g_tmpa_box=\box29
+\g_tmpb_box=\box30
+\l__box_top_dim=\dimen108
+\l__box_bottom_dim=\dimen109
+\l__box_left_dim=\dimen110
+\l__box_right_dim=\dimen111
+\l__box_top_new_dim=\dimen112
+\l__box_bottom_new_dim=\dimen113
+\l__box_left_new_dim=\dimen114
+\l__box_right_new_dim=\dimen115
+\l__box_internal_box=\box31
+\l__coffin_internal_box=\box32
+\l__coffin_internal_dim=\dimen116
+\l__coffin_offset_x_dim=\dimen117
+\l__coffin_offset_y_dim=\dimen118
+\l__coffin_x_dim=\dimen119
+\l__coffin_y_dim=\dimen120
+\l__coffin_x_prime_dim=\dimen121
+\l__coffin_y_prime_dim=\dimen122
+\c_empty_coffin=\box33
+\l__coffin_aligned_coffin=\box34
+\l__coffin_aligned_internal_coffin=\box35
+\l_tmpa_coffin=\box36
+\l_tmpb_coffin=\box37
+\l__coffin_display_coffin=\box38
+\l__coffin_display_coord_coffin=\box39
+\l__coffin_display_pole_coffin=\box40
+\l__coffin_display_offset_dim=\dimen123
+\l__coffin_display_x_dim=\dimen124
+\l__coffin_display_y_dim=\dimen125
+\l__coffin_bounding_shift_dim=\dimen126
+\l__coffin_left_corner_dim=\dimen127
+\l__coffin_right_corner_dim=\dimen128
+\l__coffin_bottom_corner_dim=\dimen129
+\l__coffin_top_corner_dim=\dimen130
+\l__coffin_scaled_total_height_dim=\dimen131
+\l__coffin_scaled_width_dim=\dimen132
+)
+(c:/texlive/2018/texmf-dist/tex/latex/l3kernel/l3xdvipdfmx.def
+File: l3xdvidpfmx.def 2017/03/18 v L3 Experimental driver: xdvipdfmx
+\g__driver_image_int=\count158
+))
+Document Class: ctexbook 2018/01/28 v2.4.12 Chinese adapter for class book (CTE
+X)
+(c:/texlive/2018/texmf-dist/tex/latex/l3packages/xparse/xparse.sty
+Package: xparse 2018/02/21 L3 Experimental document command parser
+\l__xparse_current_arg_int=\count159
+\g__xparse_grabber_int=\count160
+\l__xparse_m_args_int=\count161
+\l__xparse_mandatory_args_int=\count162
+\l__xparse_v_nesting_int=\count163
+)
+(c:/texlive/2018/texmf-dist/tex/latex/l3packages/l3keys2e/l3keys2e.sty
+Package: l3keys2e 2018/02/21 LaTeX2e option processing using LaTeX3 keys
+)
+\g__file_internal_ior=\read1
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/ctexhook.sty
+Package: ctexhook 2018/01/28 v2.4.12 Document and package hooks (CTEX)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/ctexpatch.sty
+Package: ctexpatch 2018/01/28 v2.4.12 Patching commands (CTEX)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/base/fix-cm.sty
+Package: fix-cm 2015/01/14 v1.1t fixes to LaTeX
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/ts1enc.def
+File: ts1enc.def 2001/06/05 v3.0e (jk/car/fm) Standard LaTeX file
+))
+(c:/texlive/2018/texmf-dist/tex/latex/ms/everysel.sty
+Package: everysel 2011/10/28 v1.2 EverySelectfont Package (MS)
+)
+\l__ctex_tmp_int=\count164
+\l__ctex_tmp_box=\box41
+\l__ctex_tmp_dim=\dimen133
+\g__ctex_font_size_flag=\count165
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/config/ctexopts.cfg
+File: ctexopts.cfg 2018/01/28 v2.4.12 Option configuration file (CTEX)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/base/book.cls
+Document Class: book 2014/09/29 v1.4h Standard LaTeX document class
+(c:/texlive/2018/texmf-dist/tex/latex/base/bk12.clo
+File: bk12.clo 2014/09/29 v1.4h Standard LaTeX file (size option)
+)
+\c@part=\count166
+\c@chapter=\count167
+\c@section=\count168
+\c@subsection=\count169
+\c@subsubsection=\count170
+\c@paragraph=\count171
+\c@subparagraph=\count172
+\c@figure=\count173
+\c@table=\count174
+\abovecaptionskip=\skip47
+\belowcaptionskip=\skip48
+\bibindent=\dimen134
+)
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/engine/ctex-engine-xetex.def
+File: ctex-engine-xetex.def 2018/01/28 v2.4.12 XeLaTeX adapter (CTEX)
+
+(c:/texlive/2018/texmf-dist/tex/xelatex/xecjk/xeCJK.sty
+Package: xeCJK 2018/04/07 v3.7.0 Typesetting CJK scripts with XeLaTeX
+
+(c:/texlive/2018/texmf-dist/tex/latex/l3packages/xtemplate/xtemplate.sty
+Package: xtemplate 2018/02/21 L3 Experimental prototype document functions
+\l__xtemplate_tmp_dim=\dimen135
+\l__xtemplate_tmp_int=\count175
+\l__xtemplate_tmp_muskip=\muskip16
+\l__xtemplate_tmp_skip=\skip49
+)
+\l__xeCJK_tmp_int=\count176
+\l__xeCJK_tmp_box=\box42
+\l__xeCJK_tmp_dim=\dimen136
+\l__xeCJK_tmp_skip=\skip50
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKShipoutHook with sig. '' on line 150.
+.................................................
+\g__xeCJK_space_factor_int=\count177
+\l__xeCJK_begin_int=\count178
+\l__xeCJK_end_int=\count179
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \makexeCJKactive with sig. '' on line 396.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \makexeCJKinactive with sig. '' on line 398.
+.................................................
+\c__xeCJK_CJK_class_int=\XeTeXcharclass1
+\c__xeCJK_FullLeft_class_int=\XeTeXcharclass2
+\c__xeCJK_FullRight_class_int=\XeTeXcharclass3
+\c__xeCJK_HalfLeft_class_int=\XeTeXcharclass4
+\c__xeCJK_HalfRight_class_int=\XeTeXcharclass5
+\c__xeCJK_NormalSpace_class_int=\XeTeXcharclass6
+\c__xeCJK_CM_class_int=\XeTeXcharclass7
+\c__xeCJK_HangulJamo_class_int=\XeTeXcharclass8
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKDeclareCharClass with sig. 's>{\TrimSpaces }mm' on
+. line 580.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \__xeCJK_set_char_class_aux:Nnw with sig.
+. 'm>{\SplitArgument {1}{->}}m' on line 601.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \normalspacedchars with sig. 'm' on line 647.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKResetPunctClass with sig. '' on line 658.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKResetCharClass with sig. '' on line 670.
+.................................................
+\l__xeCJK_last_skip=\skip51
+\g__xeCJK_node_int=\count180
+\c__xeCJK_CJK_node_dim=\dimen137
+\c__xeCJK_CJK-space_node_dim=\dimen138
+\c__xeCJK_default_node_dim=\dimen139
+\c__xeCJK_default-space_node_dim=\dimen140
+\c__xeCJK_CJK-widow_node_dim=\dimen141
+\c__xeCJK_normalspace_node_dim=\dimen142
+\l__xeCJK_ccglue_skip=\skip52
+\l__xeCJK_ecglue_skip=\skip53
+\l__xeCJK_punct_kern_skip=\skip54
+\l__xeCJK_last_penalty_int=\count181
+\l__xeCJK_last_bound_dim=\dimen143
+\l__xeCJK_last_kern_dim=\dimen144
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKnobreak with sig. '' on line 1889.
+.................................................
+\l__xeCJK_widow_penalty_int=\count182
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKDeclareSubCJKBlock with sig. 's>{\TrimSpaces }mm' on
+. line 2084.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKCancelSubCJKBlock with sig. 'sm' on line 2095.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKRestoreSubCJKBlock with sig. 'sm' on line 2104.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKsetwidth with sig. 'smm' on line 2202.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKsetkern with sig. 'mmm' on line 2206.
+.................................................
+.................................................
+. xtemplate info: "declare-object-type"
+.
+. Declaring object type 'xeCJK/punctuation' taking 0 argument(s) on line 2362.
+.................................................
+\l__xeCJK_fixed_punct_width_dim=\dimen145
+\l__xeCJK_mixed_punct_width_dim=\dimen146
+\l__xeCJK_middle_punct_width_dim=\dimen147
+\l__xeCJK_fixed_margin_width_dim=\dimen148
+\l__xeCJK_mixed_margin_width_dim=\dimen149
+\l__xeCJK_middle_margin_width_dim=\dimen150
+\l__xeCJK_bound_punct_width_dim=\dimen151
+\l__xeCJK_bound_margin_width_dim=\dimen152
+\l__xeCJK_margin_minimum_dim=\dimen153
+\l__xeCJK_kerning_total_width_dim=\dimen154
+\l__xeCJK_same_align_margin_dim=\dimen155
+\l__xeCJK_different_align_margin_dim=\dimen156
+\l__xeCJK_kerning_margin_width_dim=\dimen157
+\l__xeCJK_kerning_margin_minimum_dim=\dimen158
+\l__xeCJK_bound_dim=\dimen159
+\l__xeCJK_reverse_bound_dim=\dimen160
+\l__xeCJK_minimum_bound_dim=\dimen161
+\l__xeCJK_kerning_margin_dim=\dimen162
+\l__xeCJK_original_margin_dim=\dimen163
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKDeclarePunctStyle with sig. '>{\TrimSpaces }mm' on
+. line 2942.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKEditPunctStyle with sig. '>{\TrimSpaces }mm' on line
+. 2955.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setCJKfallbackfamilyfont with sig. 'mom' on line 3059.
+.................................................
+\g__xeCJK_family_int=\count183
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CJKfamily with sig. 't+t-m' on line 3493.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \__xeCJK_gobble_CJKfamily:wn with sig. 't+t-m' on line
+. 3558.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \__xeCJK_post_arg:w with sig. 'mmmO{}' on line 3618.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setCJKmainfont with sig. 'om' on line 3624.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setCJKsansfont with sig. 'om' on line 3631.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setCJKmonofont with sig. 'om' on line 3637.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setCJKfamilyfont with sig. 'mom' on line 3648.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \newCJKfontfamily with sig. 'omom' on line 3658.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CJKfontspec with sig. 'om' on line 3664.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \defaultCJKfontfeatures with sig. 'm' on line 3687.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \addCJKfontfeatures with sig. 'sO{}m' on line 3693.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setCJKmathfont with sig. 'om' on line 3870.
+.................................................
+\l__xeCJK_fam_int=\count184
+\g__xeCJK_fam_allocation_int=\count185
+\l__xeCJK_verb_case_int=\count186
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKVerbAddon with sig. '' on line 4117.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKOffVerbAddon with sig. '' on line 4121.
+.................................................
+\l__xeCJK_verb_exspace_skip=\skip55
+
+(c:/texlive/2018/texmf-dist/tex/latex/fontspec/fontspec.sty
+Package: fontspec 2017/11/09 v2.6g Font selection for XeLaTeX and LuaLaTeX
+
+(c:/texlive/2018/texmf-dist/tex/latex/fontspec/fontspec-xetex.sty
+Package: fontspec-xetex 2017/11/09 v2.6g Font selection for XeLaTeX and LuaLaTe
+X
+\l__fontspec_script_int=\count187
+\l__fontspec_language_int=\count188
+\l__fontspec_strnum_int=\count189
+\l__fontspec_tmp_int=\count190
+\l__fontspec_em_int=\count191
+\l__fontspec_emdef_int=\count192
+\l__fontspec_strong_int=\count193
+\l__fontspec_strongdef_int=\count194
+\l__fontspec_tmpa_dim=\dimen164
+\l__fontspec_tmpb_dim=\dimen165
+\l__fontspec_tmpc_dim=\dimen166
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/fontenc.sty
+Package: fontenc 2017/04/05 v2.0i Standard LaTeX package
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/tuenc.def
+File: tuenc.def 2017/04/05 v2.0i Standard LaTeX file
+LaTeX Font Info: Redeclaring font encoding TU on input line 82.
+))
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \fontspec with sig. 'O{}mO{}' on line 542.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setmainfont with sig. 'O{}mO{}' on line 546.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setsansfont with sig. 'O{}mO{}' on line 550.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setmonofont with sig. 'O{}mO{}' on line 554.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setmathrm with sig. 'O{}mO{}' on line 558.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setboldmathrm with sig. 'O{}mO{}' on line 562.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setmathsf with sig. 'O{}mO{}' on line 566.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setmathtt with sig. 'O{}mO{}' on line 570.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \setromanfont with sig. 'O{}mO{}' on line 574.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \newfontfamily with sig. 'mO{}mO{}' on line 578.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \newfontface with sig. 'mO{}mO{}' on line 582.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \defaultfontfeatures with sig. 't+om' on line 586.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \addfontfeatures with sig. 'm' on line 590.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \addfontfeature with sig. 'm' on line 594.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \newfontfeature with sig. 'mm' on line 598.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \newAATfeature with sig. 'mmmm' on line 602.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \newopentypefeature with sig. 'mmm' on line 606.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \newICUfeature with sig. 'mmm' on line 610.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \aliasfontfeature with sig. 'mm' on line 614.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \aliasfontfeatureoption with sig. 'mmm' on line 618.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \newfontscript with sig. 'mm' on line 622.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \newfontlanguage with sig. 'mm' on line 626.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \DeclareFontsExtensions with sig. 'm' on line 630.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \IfFontFeatureActiveTF with sig. 'mmm' on line 634.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \EncodingCommand with sig. 'mO{}m' on line 3632.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \EncodingAccent with sig. 'mm' on line 3638.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \EncodingSymbol with sig. 'mm' on line 3644.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \EncodingComposite with sig. 'mmm' on line 3650.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \EncodingCompositeCommand with sig. 'mmm' on line 3656.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \DeclareUnicodeEncoding with sig. 'mm' on line 3681.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \UndeclareSymbol with sig. 'm' on line 3687.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \UndeclareAccent with sig. 'm' on line 3693.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \UndeclareCommand with sig. 'm' on line 3699.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \UndeclareComposite with sig. 'mm' on line 3706.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/fontspec/fontspec.cfg)
+LaTeX Info: Redefining \itshape on input line 3891.
+LaTeX Info: Redefining \slshape on input line 3896.
+LaTeX Info: Redefining \scshape on input line 3901.
+LaTeX Info: Redefining \upshape on input line 3906.
+LaTeX Info: Redefining \em on input line 3936.
+LaTeX Info: Redefining \emph on input line 3961.
+LaTeX Info: Redefining \- on input line 4015.
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \oldstylenums with sig. 'm' on line 4110.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \liningnums with sig. 'm' on line 4114.
+.................................................
+))
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKsetup with sig. '+m' on line 4422.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKsetemboldenfactor with sig. 'm' on line 4424.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKsetslantfactor with sig. 'm' on line 4426.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \punctstyle with sig. 'm' on line 4427.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKplainchr with sig. '' on line 4428.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CJKsetecglue with sig. 'm' on line 4429.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CJKspace with sig. '' on line 4431.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CJKnospace with sig. '' on line 4432.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKallowbreakbetweenpuncts with sig. '' on line 4434.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKnobreakbetweenpuncts with sig. '' on line 4436.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKenablefallback with sig. '' on line 4438.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKdisablefallback with sig. '' on line 4440.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKsetcharclass with sig. 'mmm' on line 4445.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \fontfamily with sig. 'm' on line 4628.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/xelatex/xecjk/xeCJK.cfg
+File: xeCJK.cfg 2018/04/07 v3.7.0 Configuration file for xeCJK package
+))
+(c:/texlive/2018/texmf-dist/tex/xelatex/xecjk/xeCJKfntef.sty
+Package: xeCJKfntef 2018/04/07 v3.7.0 xeCJK font effect
+
+(c:/texlive/2018/texmf-dist/tex/generic/ulem/ulem.sty
+\UL@box=\box43
+\UL@hyphenbox=\box44
+\UL@skip=\skip56
+\UL@hook=\toks14
+\UL@height=\dimen167
+\UL@pe=\count195
+\UL@pixel=\dimen168
+\ULC@box=\box45
+Package: ulem 2012/05/18
+\ULdepth=\dimen169
+)
+(c:/texlive/2018/texmf-dist/tex/latex/environ/environ.sty
+Package: environ 2014/05/04 v0.3 A new way to define environments
+
+(c:/texlive/2018/texmf-dist/tex/latex/trimspaces/trimspaces.sty
+Package: trimspaces 2009/09/17 v1.1 Trim spaces around a token list
+)
+\@envbody=\toks15
+)
+(c:/texlive/2018/texmf-dist/tex/latex/cjk/texinput/CJKfntef.sty
+Package: CJKfntef 2015/04/18 4.8.4
+\CJK@fntefSkip=\skip57
+\CJK@nest=\count196
+\CJK@fntefDimen=\dimen170
+\CJK@underdotBox=\box46
+\CJK@ULbox=\box47
+\CJK@underanyskip=\dimen171
+)
+\l__xeCJK_space_skip=\skip58
+\c__xeCJK_ulem-begin_node_dim=\dimen172
+\c__xeCJK_null_box=\box48
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \xeCJKfntefon with sig. 'st-so' on line 657.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \CJKunderline with sig. 'st-so' on line 673.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \varCJKunderline with sig. '' on line 675.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \CJKunderwave with sig. 'st-so' on line 685.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \CJKunderdblline with sig. 'st-so' on line 709.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \CJKsout with sig. 'st-so' on line 728.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \CJKxout with sig. 'st-so' on line 745.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \CJKunderanyline with sig. 'st-somm' on line 766.
+.................................................
+\l__xeCJK_fntef_box=\box49
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \CJKunderanysymbol with sig. 'ommm' on line 940.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \CJKunderdot with sig. 'om' on line 949.
+.................................................
+\l__xeCJK_under_symbol_box=\box50
+.................................................
+. LaTeX info: "xparse/redefine-environment"
+.
+. Redefining environment 'CJKfilltwosides' with sig. 'O{c}m' on line 1053.
+.................................................
+\c__xeCJK_filll_skip=\skip59
+)
+\ccwd=\dimen173
+\l__ctex_ccglue_skip=\skip60
+)
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \ctexset with sig. '' on line 388.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXsetup with sig. '+o>{\TrimSpaces }m' on line 394.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXoptions with sig. '+o' on line 400.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXsetfont with sig. '' on line 418.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \ziju with sig. 'm' on line 490.
+.................................................
+\l__ctex_ziju_dim=\dimen174
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXindent with sig. '' on line 531.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXnoindent with sig. '' on line 537.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/zhnumber/zhnumber.sty
+Package: zhnumber 2018/01/28 v2.6 Typesetting numbers with Chinese glyphs
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumber with sig. '+o+m' on line 50.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumberwithoptions with sig. '+m+m' on line 57.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnum with sig. '+o+m' on line 111.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumwithoptions with sig. '+m+m' on line 118.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdig with sig. '+o+m' on line 295.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdigwithoptions with sig. '+m+m' on line 302.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdigits with sig. '+s+o+m' on line 316.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdigitswithoptions with sig. '+m+m+m' on line 323.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhdate with sig. '+s+m' on line 382.
+.................................................
+\l__zhnum_scale_int=\count197
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumExtendScaleMap with sig. '>{\TrimSpaces }+o+m' on
+. line 504.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zhnumsetup with sig. '+m' on line 934.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/zhnumber/zhnumber-utf8.cfg
+File: zhnumber-utf8.cfg 2018/01/28 v2.6 Chinese numerals with UTF8 encoding
+))
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXnumber with sig. 'mm' on line 554.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTEXdigits with sig. 'mm' on line 556.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \ctex_assign_heading_name:nn with sig. 'm>{\SplitArgument
+. {\c_one }{,}}+m' on line 681.
+.................................................
+\l__ctex_heading_skip=\skip61
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \partmark with sig. 'm' on line 728.
+.................................................
+.................................................
+. LaTeX info: "xparse/redefine-command"
+.
+. Redefining command \refstepcounter with sig. 'm' on line 1323.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/scheme/ctex-scheme-chinese-book.def
+File: ctex-scheme-chinese-book.def 2018/01/28 v2.4.12 Chinese scheme for book (
+CTEX)
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/config/ctex-name-utf8.cfg
+File: ctex-name-utf8.cfg 2018/01/28 v2.4.12 Caption with encoding UTF8 (CTEX)
+))
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \zihao with sig. 'm' on line 1326.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/ctex-cs4size.clo
+File: ctex-cs4size.clo 2018/01/28 v2.4.12 cs4size option (CTEX)
+)
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \CTeX with sig. '' on line 1430.
+.................................................
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/fontset/ctex-fontset-windows.def
+File: ctex-fontset-windows.def 2018/01/28 v2.4.12 Windows fonts definition (CTE
+X)
+
+(c:/texlive/2018/texmf-dist/tex/latex/ctex/fontset/ctex-fontset-windowsnew.def
+File: ctex-fontset-windowsnew.def 2018/01/28 v2.4.12 Windows fonts definition f
+or Vista or later version (CTEX)
+\g__fontspec_family_SimSun_int=\count198
+.................................................
+. fontspec info: "no-font-shape"
+.
+. Could not resolve font "KaiTi/B" (it probably doesn't exist).
+.................................................
+.................................................
+. fontspec info: "no-font-shape"
+.
+. Could not resolve font "SimHei/I" (it probably doesn't exist).
+.................................................
+.................................................
+. fontspec info: "no-font-shape"
+.
+. Could not resolve font "SimSun/BI" (it probably doesn't exist).
+.................................................
+.................................................
+. fontspec info: "defining-font"
+.
+. Font family 'SimSun(0)' created for font 'SimSun' with options
+. [Script={CJK},BoldFont={SimHei},ItalicFont={KaiTi}].
+.
+. This font family consists of the following NFSS series/shapes:
+.
+. - 'normal' (m/n) with NFSS spec.: <->"SimSun/OT:script=hani;language=DFLT;"
+. - 'small caps' (m/sc) with NFSS spec.:
+. - 'bold' (bx/n) with NFSS spec.: <->"SimHei/OT:script=hani;language=DFLT;"
+. - 'bold small caps' (bx/sc) with NFSS spec.:
+. - 'italic' (m/it) with NFSS spec.: <->"KaiTi/OT:script=hani;language=DFLT;"
+. - 'italic small caps' (m/itsc) with NFSS spec.:
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \songti with sig. '' on line 111.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \heiti with sig. '' on line 112.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \fangsong with sig. '' on line 113.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \kaishu with sig. '' on line 114.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \lishu with sig. '' on line 115.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \youyuan with sig. '' on line 116.
+.................................................
+.................................................
+. LaTeX info: "xparse/define-command"
+.
+. Defining command \yahei with sig. '' on line 117.
+.................................................
+))) (c:/texlive/2018/texmf-dist/tex/latex/ctex/config/ctex.cfg
+File: ctex.cfg 2018/01/28 v2.4.12 Configuration file (CTEX)
+)
+(c:/texlive/2018/texmf-dist/tex/generic/ifxetex/ifxetex.sty
+Package: ifxetex 2010/09/12 v0.6 Provides ifxetex conditional
+)
+(c:/texlive/2018/texmf-dist/tex/latex/etoolbox/etoolbox.sty
+Package: etoolbox 2018/02/11 v2.5e e-TeX tools for LaTeX (JAW)
+\etb@tempcnta=\count199
+))
+(./Style/ucasthesis.cfg
+File: ucasthesis.cfg 2014/10/01 v1.0 class configuration file
+) (./Style/artratex.sty
+
+LaTeX Warning: You have requested package `Style/artratex',
+ but the package provides `artratex'.
+
+Package: artratex 2014/10/01 v0.1 LaTeX macros package
+(c:/texlive/2018/texmf-dist/tex/generic/oberdiek/ifluatex.sty
+Package: ifluatex 2016/05/16 v1.4 Provides the ifluatex switch (HO)
+Package ifluatex Info: LuaTeX not detected.
+)
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amsmath.sty
+Package: amsmath 2017/09/02 v2.17a AMS math features
+\@mathmargin=\skip62
+
+For additional information on amsmath, use the `?' option.
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amstext.sty
+Package: amstext 2000/06/29 v2.01 AMS text
+
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amsgen.sty
+File: amsgen.sty 1999/11/30 v2.0 generic functions
+\@emptytoks=\toks16
+\ex@=\dimen175
+))
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amsbsy.sty
+Package: amsbsy 1999/11/29 v1.2d Bold Symbols
+\pmbraise@=\dimen176
+)
+(c:/texlive/2018/texmf-dist/tex/latex/amsmath/amsopn.sty
+Package: amsopn 2016/03/08 v2.02 operator names
+)
+\inf@bad=\count266
+LaTeX Info: Redefining \frac on input line 213.
+\uproot@=\count267
+\leftroot@=\count268
+LaTeX Info: Redefining \overline on input line 375.
+\classnum@=\count269
+\DOTSCASE@=\count270
+LaTeX Info: Redefining \ldots on input line 472.
+LaTeX Info: Redefining \dots on input line 475.
+LaTeX Info: Redefining \cdots on input line 596.
+\Mathstrutbox@=\box51
+\strutbox@=\box52
+\big@size=\dimen177
+LaTeX Font Info: Redeclaring font encoding OML on input line 712.
+LaTeX Font Info: Redeclaring font encoding OMS on input line 713.
+\macc@depth=\count271
+\c@MaxMatrixCols=\count272
+\dotsspace@=\muskip17
+\c@parentequation=\count273
+\dspbrk@lvl=\count274
+\tag@help=\toks17
+\row@=\count275
+\column@=\count276
+\maxfields@=\count277
+\andhelp@=\toks18
+\eqnshift@=\dimen178
+\alignsep@=\dimen179
+\tagshift@=\dimen180
+\tagwidth@=\dimen181
+\totwidth@=\dimen182
+\lineht@=\dimen183
+\@envbody=\toks19
+\multlinegap=\skip63
+\multlinetaggap=\skip64
+\mathdisplay@stack=\toks20
+LaTeX Info: Redefining \[ on input line 2817.
+LaTeX Info: Redefining \] on input line 2818.
+)
+(c:/texlive/2018/texmf-dist/tex/latex/amscls/amsthm.sty
+Package: amsthm 2017/10/31 v2.20.4
+\thm@style=\toks21
+\thm@bodyfont=\toks22
+\thm@headfont=\toks23
+\thm@notefont=\toks24
+\thm@headpunct=\toks25
+\thm@preskip=\skip65
+\thm@postskip=\skip66
+\thm@headsep=\skip67
+\dth@everypar=\toks26
+)
+(c:/texlive/2018/texmf-dist/tex/latex/amsfonts/amssymb.sty
+Package: amssymb 2013/01/14 v3.01 AMS font symbols
+
+(c:/texlive/2018/texmf-dist/tex/latex/amsfonts/amsfonts.sty
+Package: amsfonts 2013/01/14 v3.01 Basic AMSFonts support
+\symAMSa=\mathgroup4
+\symAMSb=\mathgroup5
+LaTeX Font Info: Overwriting math alphabet `\mathfrak' in version `bold'
+(Font) U/euf/m/n --> U/euf/b/n on input line 106.
+))
+\c@theorem=\count278
+\c@definition=\count279
+\c@example=\count280
+
+*************************************************
+* xeCJK warning: "CJKfamily-redef"
+*
+* Redefining CJKfamily `\CJKrmdefault' (SimSun(0)).
+*************************************************
+.................................................
+. fontspec info: "no-font-shape"
+.
+. Could not resolve font "KaiTi/B" (it probably doesn't exist).
+.................................................
+.................................................
+. fontspec info: "no-font-shape"
+.
+. Could not resolve font "SimSun/I" (it probably doesn't exist).
+.................................................
+.................................................
+. fontspec info: "no-font-shape"
+.
+. Could not resolve font "SimSun/BI" (it probably doesn't exist).
+.................................................
+.................................................
+. fontspec info: "defining-font"
+.
+. Font family 'SimSun(1)' created for font 'SimSun' with options
+. [Script={CJK},AutoFakeBold={4},ItalicFont={KaiTi}].
+.
+. This font family consists of the following NFSS series/shapes:
+.
+. - 'normal' (m/n) with NFSS spec.: <->"SimSun/OT:script=hani;language=DFLT;"
+. - 'small caps' (m/sc) with NFSS spec.:
+. - 'bold' (bx/n) with NFSS spec.:
+. <->"SimSun/OT:script=hani;language=DFLT;embolden=4;"
+. - 'bold small caps' (bx/sc) with NFSS spec.:
+. - 'italic' (m/it) with NFSS spec.: <->"KaiTi/OT:script=hani;language=DFLT;"
+. - 'italic small caps' (m/itsc) with NFSS spec.:
+.................................................
+*************************************************
+* xeCJK warning: "CJKfamily-redef"
+*
+* Redefining CJKfamily `\CJKsfdefault' (Microsoft YaHei).
+*************************************************
+*************************************************
+* xeCJK warning: "CJKfamily-redef"
+*
+* Redefining CJKfamily `\CJKttdefault' (FangSong).
+*************************************************
+.................................................
+. fontspec info: "defining-font"
+.
+. Font family 'entextrm' created for font 'Times New Roman' with options
+. [Ligatures=TeX,NFSSFamily=entextrm].
+.
+. This font family consists of the following NFSS series/shapes:
+.
+. - 'normal' (m/n) with NFSS spec.: <->"Times New
+. Roman/OT:script=latn;language=DFLT;mapping=tex-text;"
+. - 'small caps' (m/sc) with NFSS spec.: <->"Times New
+. Roman/OT:script=latn;language=DFLT;+smcp;mapping=tex-text;"
+. - 'bold' (bx/n) with NFSS spec.: <->"Times New
+. Roman/B/OT:script=latn;language=DFLT;mapping=tex-text;"
+. - 'bold small caps' (bx/sc) with NFSS spec.: <->"Times New
+. Roman/B/OT:script=latn;language=DFLT;+smcp;mapping=tex-text;"
+. - 'italic' (m/it) with NFSS spec.: <->"Times New
+. Roman/I/OT:script=latn;language=DFLT;mapping=tex-text;"
+. - 'italic small caps' (m/itsc) with NFSS spec.: <->"Times New
+. Roman/I/OT:script=latn;language=DFLT;+smcp;mapping=tex-text;"
+. - 'bold italic' (bx/it) with NFSS spec.: <->"Times New
+. Roman/BI/OT:script=latn;language=DFLT;mapping=tex-text;"
+. - 'bold italic small caps' (bx/itsc) with NFSS spec.: <->"Times New
+. Roman/BI/OT:script=latn;language=DFLT;+smcp;mapping=tex-text;"
+.................................................
+LaTeX Info: Redefining \rmfamily on input line 277.
+.................................................
+. fontspec info: "defining-font"
+.
+. Font family 'entextsf' created for font 'Times New Roman' with options
+. [Ligatures=TeX,NFSSFamily=entextsf].
+.
+. This font family consists of the following NFSS series/shapes:
+.
+. - 'normal' (m/n) with NFSS spec.: <->"Times New
+. Roman/OT:script=latn;language=DFLT;mapping=tex-text;"
+. - 'small caps' (m/sc) with NFSS spec.: <->"Times New
+. Roman/OT:script=latn;language=DFLT;+smcp;mapping=tex-text;"
+. - 'bold' (bx/n) with NFSS spec.: <->"Times New
+. Roman/B/OT:script=latn;language=DFLT;mapping=tex-text;"
+. - 'bold small caps' (bx/sc) with NFSS spec.: <->"Times New
+. Roman/B/OT:script=latn;language=DFLT;+smcp;mapping=tex-text;"
+. - 'italic' (m/it) with NFSS spec.: <->"Times New
+. Roman/I/OT:script=latn;language=DFLT;mapping=tex-text;"
+. - 'italic small caps' (m/itsc) with NFSS spec.: <->"Times New
+. Roman/I/OT:script=latn;language=DFLT;+smcp;mapping=tex-text;"
+. - 'bold italic' (bx/it) with NFSS spec.: <->"Times New
+. Roman/BI/OT:script=latn;language=DFLT;mapping=tex-text;"
+. - 'bold italic small caps' (bx/itsc) with NFSS spec.: <->"Times New
+. Roman/BI/OT:script=latn;language=DFLT;+smcp;mapping=tex-text;"
+.................................................
+LaTeX Info: Redefining \sffamily on input line 279.
+(c:/texlive/2018/texmf-dist/tex/latex/newtx/newtxmath.sty
+Package: newtxmath 2017/12/14 v1.53
+
+`newtxmath' v1.53, 2017/12/14 Math macros based on txfonts (msharpe)
+(c:/texlive/2018/texmf-dist/tex/latex/xkeyval/xkeyval.sty
+Package: xkeyval 2014/12/03 v2.7a package option processing (HA)
+
+(c:/texlive/2018/texmf-dist/tex/generic/xkeyval/xkeyval.tex
+(c:/texlive/2018/texmf-dist/tex/generic/xkeyval/xkvutils.tex
+\XKV@toks=\toks27
+\XKV@tempa@toks=\toks28
+
+(c:/texlive/2018/texmf-dist/tex/generic/xkeyval/keyval.tex))
+\XKV@depth=\count281
+File: xkeyval.tex 2014/12/03 v2.7a key=value parser (HA)
+))
+(c:/texlive/2018/texmf-dist/tex/latex/oberdiek/centernot.sty
+Package: centernot 2016/05/16 v1.4 Centers the not symbol horizontally (HO)
+)
+(c:/texlive/2018/texmf-dist/tex/generic/kastrup/binhex.tex)
+\tx@Isdigit=\count282
+LaTeX Font Info: Redeclaring symbol font `operators' on input line 156.
+LaTeX Font Info: Overwriting symbol font `operators' in version `normal'
+(Font) OT1/cmr/m/n --> OT1/entextrm/m/n on input line 156.
+LaTeX Font Info: Overwriting symbol font `operators' in version `bold'
+(Font) OT1/cmr/bx/n --> OT1/entextrm/m/n on input line 156.
+LaTeX Font Info: Overwriting symbol font `operators' in version `bold'
+(Font) OT1/entextrm/m/n --> OT1/entextrm/b/n on input line 157
+.
+LaTeX Font Info: Redeclaring math alphabet \mathsf on input line 163.
+LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `normal'
+(Font) OT1/cmss/m/n --> TU/entextsf/m/n on input line 163.
+LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `bold'
+(Font) OT1/cmss/bx/n --> TU/entextsf/m/n on input line 163.
+LaTeX Font Info: Redeclaring math alphabet \mathit on input line 164.
+LaTeX Font Info: Overwriting math alphabet `\mathit' in version `normal'
+(Font) OT1/cmr/m/it --> OT1/entextrm/m/it on input line 164.
+LaTeX Font Info: Overwriting math alphabet `\mathit' in version `bold'
+(Font) OT1/cmr/bx/it --> OT1/entextrm/m/it on input line 164.
+LaTeX Font Info: Redeclaring math alphabet \mathtt on input line 165.
+LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `normal'
+(Font) OT1/cmtt/m/n --> TU/lmtt/m/n on input line 165.
+LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `bold'
+(Font) OT1/cmtt/m/n --> TU/lmtt/m/n on input line 165.
+LaTeX Font Info: Redeclaring math alphabet \mathbf on input line 167.
+LaTeX Font Info: Overwriting math alphabet `\mathbf' in version `normal'
+(Font) OT1/cmr/bx/n --> OT1/entextrm/b/n on input line 167.
+LaTeX Font Info: Overwriting math alphabet `\mathbf' in version `bold'
+(Font) OT1/cmr/bx/n --> OT1/entextrm/b/n on input line 167.
+LaTeX Font Info: Overwriting math alphabet `\mathit' in version `bold'
+(Font) OT1/entextrm/m/it --> OT1/entextrm/b/it on input line 1
+68.
+LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `bold'
+(Font) TU/entextsf/m/n --> TU/entextsf/b/n on input line 169.
+LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `bold'
+(Font) TU/lmtt/m/n --> TU/lmtt/b/n on input line 170.
+LaTeX Font Info: Redeclaring symbol font `letters' on input line 221.
+LaTeX Font Info: Overwriting symbol font `letters' in version `normal'
+(Font) OML/cmm/m/it --> OML/ntxmi/m/it on input line 221.
+LaTeX Font Info: Overwriting symbol font `letters' in version `bold'
+(Font) OML/cmm/b/it --> OML/ntxmi/m/it on input line 221.
+LaTeX Font Info: Overwriting symbol font `letters' in version `bold'
+(Font) OML/ntxmi/m/it --> OML/ntxmi/b/it on input line 222.
+\symlettersA=\mathgroup6
+LaTeX Font Info: Overwriting symbol font `lettersA' in version `bold'
+(Font) U/ntxmia/m/it --> U/ntxmia/b/it on input line 238.
+LaTeX Font Info: Redeclaring math alphabet \mathfrak on input line 240.
+LaTeX Font Info: Redeclaring symbol font `symbols' on input line 259.
+LaTeX Font Info: Encoding `OMS' has changed to `LMS' for symbol font
+(Font) `symbols' in the math version `normal' on input line 259.
+LaTeX Font Info: Overwriting symbol font `symbols' in version `normal'
+(Font) OMS/cmsy/m/n --> LMS/ntxsy/m/n on input line 259.
+LaTeX Font Info: Encoding `OMS' has changed to `LMS' for symbol font
+(Font) `symbols' in the math version `bold' on input line 259.
+LaTeX Font Info: Overwriting symbol font `symbols' in version `bold'
+(Font) OMS/cmsy/b/n --> LMS/ntxsy/m/n on input line 259.
+LaTeX Font Info: Overwriting symbol font `symbols' in version `bold'
+(Font) LMS/ntxsy/m/n --> LMS/ntxsy/b/n on input line 260.
+\symAMSm=\mathgroup7
+LaTeX Font Info: Overwriting symbol font `AMSm' in version `bold'
+(Font) U/ntxsym/m/n --> U/ntxsym/b/n on input line 283.
+\symsymbolsC=\mathgroup8
+LaTeX Font Info: Overwriting symbol font `symbolsC' in version `bold'
+(Font) U/ntxsyc/m/n --> U/ntxsyc/b/n on input line 304.
+LaTeX Font Info: Redeclaring symbol font `largesymbols' on input line 317.
+LaTeX Font Info: Encoding `OMX' has changed to `LMX' for symbol font
+(Font) `largesymbols' in the math version `normal' on input line 3
+17.
+LaTeX Font Info: Overwriting symbol font `largesymbols' in version `normal'
+(Font) OMX/cmex/m/n --> LMX/ntxexx/m/n on input line 317.
+LaTeX Font Info: Encoding `OMX' has changed to `LMX' for symbol font
+(Font) `largesymbols' in the math version `bold' on input line 317
+.
+LaTeX Font Info: Overwriting symbol font `largesymbols' in version `bold'
+(Font) OMX/cmex/m/n --> LMX/ntxexx/m/n on input line 317.
+LaTeX Font Info: Overwriting symbol font `largesymbols' in version `bold'
+(Font) LMX/ntxexx/m/n --> LMX/ntxexx/b/n on input line 318.
+\symlargesymbolsTXA=\mathgroup9
+LaTeX Font Info: Overwriting symbol font `largesymbolsTXA' in version `bold'
+
+(Font) U/ntxexa/m/n --> U/ntxexa/b/n on input line 332.
+LaTeX Font Info: Redeclaring math delimiter \lgroup on input line 503.
+LaTeX Font Info: Redeclaring math delimiter \rgroup on input line 504.
+LaTeX Font Info: Redeclaring math delimiter \lmoustache on input line 505.
+LaTeX Font Info: Redeclaring math delimiter \rmoustache on input line 506.
+LaTeX Font Info: Redeclaring math delimiter \lfloor on input line 512.
+LaTeX Font Info: Redeclaring math delimiter \rfloor on input line 513.
+LaTeX Font Info: Redeclaring math delimiter \lceil on input line 514.
+LaTeX Font Info: Redeclaring math delimiter \rceil on input line 515.
+LaTeX Font Info: Redeclaring math delimiter \lbrace on input line 516.
+LaTeX Font Info: Redeclaring math delimiter \rbrace on input line 517.
+LaTeX Font Info: Redeclaring math delimiter \langle on input line 518.
+LaTeX Font Info: Redeclaring math delimiter \rangle on input line 520.
+LaTeX Font Info: Redeclaring math symbol \mid on input line 522.
+LaTeX Font Info: Redeclaring math delimiter \arrowvert on input line 524.
+LaTeX Font Info: Redeclaring math delimiter \vert on input line 525.
+LaTeX Font Info: Redeclaring math symbol \parallel on input line 526.
+LaTeX Font Info: Redeclaring math delimiter \Arrowvert on input line 527.
+LaTeX Font Info: Redeclaring math delimiter \Vert on input line 528.
+LaTeX Font Info: Redeclaring math delimiter \updownarrow on input line 530.
+LaTeX Font Info: Redeclaring math delimiter \Updownarrow on input line 531.
+LaTeX Font Info: Redeclaring math delimiter \lvert on input line 536.
+LaTeX Font Info: Redeclaring math delimiter \rvert on input line 537.
+LaTeX Font Info: Redeclaring math delimiter \lVert on input line 538.
+LaTeX Font Info: Redeclaring math delimiter \rVert on input line 539.
+LaTeX Info: Redefining \not on input line 2146.
+LaTeX Info: Redefining \textsquare on input line 2176.
+LaTeX Info: Redefining \openbox on input line 2178.
+)
+LaTeX Font Info: Redeclaring symbol font `operators' on input line 287.
+LaTeX Font Info: Overwriting symbol font `operators' in version `normal'
+(Font) OT1/entextrm/m/n --> OT1/ntxtlf/m/n on input line 287.
+LaTeX Font Info: Overwriting symbol font `operators' in version `bold'
+(Font) OT1/entextrm/b/n --> OT1/ntxtlf/m/n on input line 287.
+LaTeX Font Info: Overwriting symbol font `operators' in version `bold'
+(Font) OT1/ntxtlf/m/n --> OT1/ntxtlf/b/n on input line 288.
+
+(c:/texlive/2018/texmf-dist/tex/latex/jknapltx/mathrsfs.sty
+Package: mathrsfs 1996/01/01 Math RSFS package v1.0 (jk)
+\symrsfs=\mathgroup10
+)
+(c:/texlive/2018/texmf-dist/tex/latex/mathalfa/mathalfa.sty
+Package: mathalfa 2017/02/23 - 1.09mathalfa (License LPPL) Michael Sharpe
+LaTeX Font Info: Redeclaring math alphabet \mathcal on input line 189.
+LaTeX Font Info: Overwriting math alphabet `\mathcal' in version `bold'
+(Font) U/cmsy/m/n --> U/cmsy/b/n on input line 190.
+)
+(c:/texlive/2018/texmf-dist/tex/latex/natbib/natbib.sty
+Package: natbib 2010/09/13 8.31b (PWD, AO)
+\bibhang=\skip68
+\bibsep=\skip69
+LaTeX Info: Redefining \cite on input line 694.
+\c@NAT@ctr=\count283
+)
+(c:/texlive/2018/texmf-dist/tex/latex/graphics/graphicx.sty
+Package: graphicx 2017/06/01 v1.1a Enhanced LaTeX Graphics (DPC,SPQR)
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics/graphics.sty
+Package: graphics 2017/06/25 v1.2c Standard LaTeX Graphics (DPC,SPQR)
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics/trig.sty
+Package: trig 2016/01/03 v1.10 sin cos tan (DPC)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/graphics-cfg/graphics.cfg
+File: graphics.cfg 2016/06/04 v1.11 sample graphics configuration
+)
+Package graphics Info: Driver file: xetex.def on input line 99.
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics-def/xetex.def
+File: xetex.def 2017/06/24 v5.0h Graphics/color driver for xetex
+))
+\Gin@req@height=\dimen184
+\Gin@req@width=\dimen185
+)
+(c:/texlive/2018/texmf-dist/tex/latex/caption/caption.sty
+Package: caption 2016/02/21 v3.3-144 Customizing captions (AR)
+
+(c:/texlive/2018/texmf-dist/tex/latex/caption/caption3.sty
+Package: caption3 2016/05/22 v1.7-166 caption3 kernel (AR)
+Package caption3 Info: TeX engine: e-TeX on input line 67.
+\captionmargin=\dimen186
+\captionmargin@=\dimen187
+\captionwidth=\dimen188
+\caption@tempdima=\dimen189
+\caption@indent=\dimen190
+\caption@parindent=\dimen191
+\caption@hangindent=\dimen192
+)
+\c@ContinuedFloat=\count284
+)
+(c:/texlive/2018/texmf-dist/tex/latex/caption/subcaption.sty
+Package: subcaption 2016/05/22 v1.1-161 Sub-captions (AR)
+\c@subfigure=\count285
+\c@subtable=\count286
+)
+(c:/texlive/2018/texmf-dist/tex/latex/caption/bicaption.sty
+Package: bicaption 2016/03/27 v1.1-158 Bilingual Captions (AR)
+\bicaption@lang=\count287
+Package bicaption Info: main language is not set.
+)
+(c:/texlive/2018/texmf-dist/tex/latex/placeins/placeins.sty
+Package: placeins 2005/04/18 v 2.2
+)
+(c:/texlive/2018/texmf-dist/tex/latex/fancyhdr/fancyhdr.sty
+Package: fancyhdr 2017/06/30 v3.9a Extensive control of page headers and footer
+s
+\f@nch@headwidth=\skip70
+\f@nch@O@elh=\skip71
+\f@nch@O@erh=\skip72
+\f@nch@O@olh=\skip73
+\f@nch@O@orh=\skip74
+\f@nch@O@elf=\skip75
+\f@nch@O@erf=\skip76
+\f@nch@O@olf=\skip77
+\f@nch@O@orf=\skip78
+)
+(c:/texlive/2018/texmf-dist/tex/latex/xcolor/xcolor.sty
+Package: xcolor 2016/05/11 v2.12 LaTeX color extensions (UK)
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics-cfg/color.cfg
+File: color.cfg 2016/01/02 v1.6 sample color configuration
+)
+Package xcolor Info: Package option `usenames' ignored on input line 216.
+Package xcolor Info: Driver file: xetex.def on input line 225.
+
+(c:/texlive/2018/texmf-dist/tex/latex/colortbl/colortbl.sty
+Package: colortbl 2012/02/13 v1.0a Color table columns (DPC)
+
+(c:/texlive/2018/texmf-dist/tex/latex/tools/array.sty
+Package: array 2018/04/07 v2.4g Tabular extension package (FMi)
+\col@sep=\dimen193
+\ar@mcellbox=\box53
+\extrarowheight=\dimen194
+\NC@list=\toks29
+\extratabsurround=\skip79
+\backup@length=\skip80
+\ar@cellbox=\box54
+)
+\everycr=\toks30
+\minrowclearance=\skip81
+)
+\rownum=\count288
+Package xcolor Info: Model `cmy' substituted by `cmy0' on input line 1348.
+Package xcolor Info: Model `RGB' extended on input line 1364.
+Package xcolor Info: Model `HTML' substituted by `rgb' on input line 1366.
+Package xcolor Info: Model `Hsb' substituted by `hsb' on input line 1367.
+Package xcolor Info: Model `tHsb' substituted by `hsb' on input line 1368.
+Package xcolor Info: Model `HSB' substituted by `hsb' on input line 1369.
+Package xcolor Info: Model `Gray' substituted by `gray' on input line 1370.
+Package xcolor Info: Model `wave' substituted by `hsb' on input line 1371.
+
+(c:/texlive/2018/texmf-dist/tex/latex/graphics/dvipsnam.def
+File: dvipsnam.def 2016/06/17 v3.0m Driver-dependent file (DPC,SPQR)
+))
+(c:/texlive/2018/texmf-dist/tex/latex/tools/verbatim.sty
+Package: verbatim 2014/10/28 v1.5q LaTeX2e package for verbatim enhancements
+\every@verbatim=\toks31
+\verbatim@line=\toks32
+\verbatim@in@stream=\read2
+)
+(c:/texlive/2018/texmf-dist/tex/latex/enumitem/enumitem.sty
+Package: enumitem 2011/09/28 v3.5.2 Customized lists
+\enitkv@toks@=\toks33
+\labelindent=\skip82
+\enit@outerparindent=\dimen195
+\enit@toks=\toks34
+\enit@inbox=\box55
+\enitdp@description=\count289
+)
+(c:/texlive/2018/texmf-dist/tex/latex/listings/listings.sty
+\lst@mode=\count290
+\lst@gtempboxa=\box56
+\lst@token=\toks35
+\lst@length=\count291
+\lst@currlwidth=\dimen196
+\lst@column=\count292
+\lst@pos=\count293
+\lst@lostspace=\dimen197
+\lst@width=\dimen198
+\lst@newlines=\count294
+\lst@lineno=\count295
+\lst@maxwidth=\dimen199
+
+(c:/texlive/2018/texmf-dist/tex/latex/listings/lstmisc.sty
+File: lstmisc.sty 2015/06/04 1.6 (Carsten Heinz)
+\c@lstnumber=\count296
+\lst@skipnumbers=\count297
+\lst@framebox=\box57
+)
+(c:/texlive/2018/texmf-dist/tex/latex/listings/listings.cfg
+File: listings.cfg 2015/06/04 1.6 listings configuration
+))
+Package: listings 2015/06/04 1.6 (Carsten Heinz)
+
+(c:/texlive/2018/texmf-dist/tex/latex/algorithmicx/algpseudocode.sty
+Package: algpseudocode
+
+(c:/texlive/2018/texmf-dist/tex/latex/base/ifthen.sty
+Package: ifthen 2014/09/29 v1.1c Standard LaTeX ifthen package (DPC)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/algorithmicx/algorithmicx.sty
+Package: algorithmicx 2005/04/27 v1.2 Algorithmicx
+
+Document Style algorithmicx 1.2 - a greatly improved `algorithmic' style
+\c@ALG@line=\count298
+\c@ALG@rem=\count299
+\c@ALG@nested=\count300
+\ALG@tlm=\skip83
+\ALG@thistlm=\skip84
+\c@ALG@Lnr=\count301
+\c@ALG@blocknr=\count302
+\c@ALG@storecount=\count303
+\c@ALG@tmpcounter=\count304
+\ALG@tmplength=\skip85
+)
+Document Style - pseudocode environments for use with the `algorithmicx' style
+) (c:/texlive/2018/texmf-dist/tex/latex/algorithms/algorithm.sty
+Invalid UTF-8 byte or sequence at line 11 replaced by U+FFFD.
+Package: algorithm 2009/08/24 v0.1 Document Style `algorithm' - floating enviro
+nment
+
+(c:/texlive/2018/texmf-dist/tex/latex/float/float.sty
+Package: float 2001/11/08 v1.3d Float enhancements (AL)
+\c@float@type=\count305
+\float@exts=\toks36
+\float@box=\box58
+\@float@everytoks=\toks37
+\@floatcapt=\box59
+)
+\@float@every@algorithm=\toks38
+\c@algorithm=\count306
+)
+(c:/texlive/2018/texmf-dist/tex/latex/hyperref/hyperref.sty
+Package: hyperref 2018/02/06 v6.86b Hypertext links for LaTeX
+
+(c:/texlive/2018/texmf-dist/tex/generic/oberdiek/hobsub-hyperref.sty
+Package: hobsub-hyperref 2016/05/16 v1.14 Bundle oberdiek, subset hyperref (HO)
+
+
+(c:/texlive/2018/texmf-dist/tex/generic/oberdiek/hobsub-generic.sty
+Package: hobsub-generic 2016/05/16 v1.14 Bundle oberdiek, subset generic (HO)
+Package: hobsub 2016/05/16 v1.14 Construct package bundles (HO)
+Package: infwarerr 2016/05/16 v1.4 Providing info/warning/error messages (HO)
+Package: ltxcmds 2016/05/16 v1.23 LaTeX kernel commands for general use (HO)
+Package hobsub Info: Skipping package `ifluatex' (already loaded).
+Package: ifvtex 2016/05/16 v1.6 Detect VTeX and its facilities (HO)
+Package ifvtex Info: VTeX not detected.
+Package: intcalc 2016/05/16 v1.2 Expandable calculations with integers (HO)
+Package: ifpdf 2017/03/15 v3.2 Provides the ifpdf switch
+Package: etexcmds 2016/05/16 v1.6 Avoid name clashes with e-TeX commands (HO)
+Package etexcmds Info: Could not find \expanded.
+(etexcmds) That can mean that you are not using pdfTeX 1.50 or
+(etexcmds) that some package has redefined \expanded.
+(etexcmds) In the latter case, load this package earlier.
+Package: kvsetkeys 2016/05/16 v1.17 Key value parser (HO)
+Package: kvdefinekeys 2016/05/16 v1.4 Define keys (HO)
+Package: pdftexcmds 2018/01/30 v0.27 Utility functions of pdfTeX for LuaTeX (HO
+)
+Package pdftexcmds Info: LuaTeX not detected.
+Package pdftexcmds Info: pdfTeX >= 1.30 not detected.
+Package pdftexcmds Info: \pdf@primitive is available.
+Package pdftexcmds Info: \pdf@ifprimitive is available.
+Package pdftexcmds Info: \pdfdraftmode not found.
+Package: pdfescape 2016/05/16 v1.14 Implements pdfTeX's escape features (HO)
+Package: bigintcalc 2016/05/16 v1.4 Expandable calculations on big integers (HO
+)
+Package: bitset 2016/05/16 v1.2 Handle bit-vector datatype (HO)
+Package: uniquecounter 2016/05/16 v1.3 Provide unlimited unique counter (HO)
+)
+Package hobsub Info: Skipping package `hobsub' (already loaded).
+Package: letltxmacro 2016/05/16 v1.5 Let assignment for LaTeX macros (HO)
+Package: hopatch 2016/05/16 v1.3 Wrapper for package hooks (HO)
+Package: xcolor-patch 2016/05/16 xcolor patch
+Package: atveryend 2016/05/16 v1.9 Hooks at the very end of document (HO)
+Package: atbegshi 2016/06/09 v1.18 At begin shipout hook (HO)
+Package: refcount 2016/05/16 v3.5 Data extraction from label references (HO)
+Package: hycolor 2016/05/16 v1.8 Color options for hyperref/bookmark (HO)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/oberdiek/auxhook.sty
+Package: auxhook 2016/05/16 v1.4 Hooks for auxiliary files (HO)
+)
+(c:/texlive/2018/texmf-dist/tex/latex/oberdiek/kvoptions.sty
+Package: kvoptions 2016/05/16 v3.12 Key value format for package options (HO)
+)
+\@linkdim=\dimen256
+\Hy@linkcounter=\count307
+\Hy@pagecounter=\count308
+
+(c:/texlive/2018/texmf-dist/tex/latex/hyperref/pd1enc.def
+File: pd1enc.def 2018/02/06 v6.86b Hyperref: PDFDocEncoding definition (HO)
+)
+\Hy@SavedSpaceFactor=\count309
+
+(c:/texlive/2018/texmf-dist/tex/latex/latexconfig/hyperref.cfg
+File: hyperref.cfg 2002/06/06 v1.2 hyperref configuration of TeXLive
+)
+Package hyperref Info: Option `unicode' set `true' on input line 4383.
+
+(c:/texlive/2018/texmf-dist/tex/latex/hyperref/puenc.def
+File: puenc.def 2018/02/06 v6.86b Hyperref: PDF Unicode definition (HO)
+)
+Package hyperref Info: Hyper figures OFF on input line 4509.
+Package hyperref Info: Link nesting OFF on input line 4514.
+Package hyperref Info: Hyper index ON on input line 4517.
+Package hyperref Info: Plain pages OFF on input line 4524.
+Package hyperref Info: Backreferencing OFF on input line 4529.
+Package hyperref Info: Implicit mode ON; LaTeX internals redefined.
+Package hyperref Info: Bookmarks ON on input line 4762.
+\c@Hy@tempcnt=\count310
+
+(c:/texlive/2018/texmf-dist/tex/latex/url/url.sty
+\Urlmuskip=\muskip18
+Package: url 2013/09/16 ver 3.4 Verb mode for urls, etc.
+)
+LaTeX Info: Redefining \url on input line 5115.
+\XeTeXLinkMargin=\dimen257
+\Fld@menulength=\count311
+\Field@Width=\dimen258
+\Fld@charsize=\dimen259
+Package hyperref Info: Hyper figures OFF on input line 6369.
+Package hyperref Info: Link nesting OFF on input line 6374.
+Package hyperref Info: Hyper index ON on input line 6377.
+Package hyperref Info: backreferencing OFF on input line 6384.
+Package hyperref Info: Link coloring OFF on input line 6389.
+Package hyperref Info: Link coloring with OCG OFF on input line 6394.
+Package hyperref Info: PDF/A mode OFF on input line 6399.
+LaTeX Info: Redefining \ref on input line 6439.
+LaTeX Info: Redefining \pageref on input line 6443.
+\Hy@abspage=\count312
+\c@Item=\count313
+\c@Hfootnote=\count314
+)
+Package hyperref Info: Driver (autodetected): hxetex.
+
+(c:/texlive/2018/texmf-dist/tex/latex/hyperref/hxetex.def
+File: hxetex.def 2018/02/06 v6.86b Hyperref driver for XeTeX
+
+(c:/texlive/2018/texmf-dist/tex/generic/oberdiek/stringenc.sty
+Package: stringenc 2016/05/16 v1.11 Convert strings between diff. encodings (HO
+)
+)
+\pdfm@box=\box60
+\c@Hy@AnnotLevel=\count315
+\HyField@AnnotCount=\count316
+\Fld@listcount=\count317
+\c@bookmark@seq@number=\count318
+
+(c:/texlive/2018/texmf-dist/tex/latex/oberdiek/rerunfilecheck.sty
+Package: rerunfilecheck 2016/05/16 v1.8 Rerun checks for auxiliary files (HO)
+Package uniquecounter Info: New unique counter `rerunfilecheck' on input line 2
+82.
+)
+\Hy@SectionHShift=\skip86
+)
+Package hyperref Info: Option `pdffitwindow' set `false' on input line 460.
+Package hyperref Info: Option `colorlinks' set `true' on input line 460.
+Package hyperref Info: Option `bookmarksnumbered' set `true' on input line 460.
+
+)
+(./Style/artracom.sty
+
+LaTeX Warning: You have requested package `Style/artracom',
+ but the package provides `artracom'.
+
+Package: artracom 2014/10/01 v0.1 LaTeX macros package
+) (c:/texlive/2018/texmf-dist/tex/latex/multirow/multirow.sty
+Package: multirow 2016/11/25 v2.2 Span multiple rows of a table
+\multirow@colwidth=\skip87
+\multirow@cntb=\count319
+\multirow@dima=\skip88
+\bigstrutjot=\dimen260
+)
+(c:/texlive/2018/texmf-dist/tex/latex/tools/tabularx.sty
+Package: tabularx 2016/02/03 v2.11b `tabularx' package (DPC)
+\TX@col@width=\dimen261
+\TX@old@table=\dimen262
+\TX@old@col=\dimen263
+\TX@target=\dimen264
+\TX@delta=\dimen265
+\TX@cols=\count320
+\TX@ftn=\toks39
+)
+(c:/texlive/2018/texmf-dist/tex/latex/listings/lstlang1.sty
+File: lstlang1.sty 2015/06/04 1.6 listings language file
+)
+(c:/texlive/2018/texmf-dist/tex/latex/listings/lstlang1.sty
+File: lstlang1.sty 2015/06/04 1.6 listings language file
+)
+(c:/texlive/2018/texmf-dist/tex/latex/listings/lstmisc.sty
+File: lstmisc.sty 2015/06/04 1.6 (Carsten Heinz)
+)
+(c:/texlive/2018/texmf-dist/tex/xelatex/xecjk/xeCJK-listings.sty
+Package: xeCJK-listings 2018/04/07 v3.7.0 xeCJK patch file for listings
+\l__xeCJK_listings_max_char_int=\count321
+\l__xeCJK_listings_flag_int=\count322
+)
+(Tmp/Thesis.aux
+
+LaTeX Warning: Label `chap:introduction' multiply defined.
+
+
+LaTeX Warning: Label `chap:introduction' multiply defined.
+
+
+LaTeX Warning: Label `chap:introduction' multiply defined.
+
+
+LaTeX Warning: Label `chap:introduction' multiply defined.
+
+
+LaTeX Warning: Label `chap:introduction' multiply defined.
+
+)
+\openout1 = `Thesis.aux'.
+
+LaTeX Font Info: Checking defaults for OML/ntxmi/m/it on input line 66.
+LaTeX Font Info: Try loading font information for OML+ntxmi on input line 66
+.
+ (c:/texlive/2018/texmf-dist/tex/latex/newtx/omlntxmi.fd
+File: omlntxmi.fd 2015/08/25 Fontinst v1.933 font definitions for OML/ntxmi.
+)
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for T1/cmr/m/n on input line 66.
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for OT1/cmr/m/n on input line 66.
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for OMS/cmsy/m/n on input line 66.
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for TU/lmr/m/n on input line 66.
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for OMX/cmex/m/n on input line 66.
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for U/ntxexa/m/n on input line 66.
+LaTeX Font Info: Try loading font information for U+ntxexa on input line 66.
+
+
+(c:/texlive/2018/texmf-dist/tex/latex/newtx/untxexa.fd
+File: untxexa.fd 2012/04/16 Fontinst v1.933 font definitions for U/ntxexa.
+)
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for TS1/cmr/m/n on input line 66.
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for LMS/ntxsy/m/n on input line 66.
+LaTeX Font Info: Try loading font information for LMS+ntxsy on input line 66
+.
+
+(c:/texlive/2018/texmf-dist/tex/latex/newtx/lmsntxsy.fd
+File: lmsntxsy.fd 2016/07/02 Fontinst v1.933 font definitions for LMS/ntxsy.
+)
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for LMX/ntxexx/m/n on input line 66.
+LaTeX Font Info: Try loading font information for LMX+ntxexx on input line 6
+6.
+
+(c:/texlive/2018/texmf-dist/tex/latex/newtx/lmxntxexx.fd
+File: lmxntxexx.fd 2016/07/03 Fontinst v1.933 font definitions for LMX/ntxexx.
+)
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for PD1/pdf/m/n on input line 66.
+LaTeX Font Info: ... okay on input line 66.
+LaTeX Font Info: Checking defaults for PU/pdf/m/n on input line 66.
+LaTeX Font Info: ... okay on input line 66.
+
+ABD: EverySelectfont initializing macros
+LaTeX Info: Redefining \selectfont on input line 66.
+.................................................
+. fontspec info: "setup-math"
+.
+. Adjusting the maths setup (use [no-math] to avoid this).
+.................................................
+\symlegacymaths=\mathgroup11
+LaTeX Font Info: Overwriting symbol font `legacymaths' in version `bold'
+(Font) OT1/cmr/m/n --> OT1/cmr/bx/n on input line 66.
+LaTeX Font Info: Redeclaring math accent \acute on input line 66.
+LaTeX Font Info: Redeclaring math accent \grave on input line 66.
+LaTeX Font Info: Redeclaring math accent \ddot on input line 66.
+LaTeX Font Info: Redeclaring math accent \tilde on input line 66.
+LaTeX Font Info: Redeclaring math accent \bar on input line 66.
+LaTeX Font Info: Redeclaring math accent \breve on input line 66.
+LaTeX Font Info: Redeclaring math accent \check on input line 66.
+LaTeX Font Info: Redeclaring math accent \hat on input line 66.
+LaTeX Font Info: Redeclaring math accent \dot on input line 66.
+LaTeX Font Info: Redeclaring math accent \mathring on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Gamma on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Delta on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Theta on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Lambda on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Xi on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Pi on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Sigma on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Upsilon on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Phi on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Psi on input line 66.
+LaTeX Font Info: Redeclaring math symbol \Omega on input line 66.
+LaTeX Font Info: Redeclaring math symbol \mathdollar on input line 66.
+LaTeX Font Info: Redeclaring symbol font `operators' on input line 66.
+LaTeX Font Info: Encoding `OT1' has changed to `TU' for symbol font
+(Font) `operators' in the math version `normal' on input line 66.
+LaTeX Font Info: Overwriting symbol font `operators' in version `normal'
+(Font) OT1/ntxtlf/m/n --> TU/entextrm/m/n on input line 66.
+LaTeX Font Info: Encoding `OT1' has changed to `TU' for symbol font
+(Font) `operators' in the math version `bold' on input line 66.
+LaTeX Font Info: Overwriting symbol font `operators' in version `bold'
+(Font) OT1/ntxtlf/b/n --> TU/entextrm/m/n on input line 66.
+LaTeX Font Info: Overwriting symbol font `operators' in version `normal'
+(Font) TU/entextrm/m/n --> TU/entextrm/m/n on input line 66.
+LaTeX Font Info: Overwriting math alphabet `\mathit' in version `normal'
+(Font) OT1/entextrm/m/it --> TU/entextrm/m/it on input line 66
+.
+LaTeX Font Info: Overwriting math alphabet `\mathbf' in version `normal'
+(Font) OT1/entextrm/b/n --> TU/entextrm/bx/n on input line 66.
+
+LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `normal'
+(Font) TU/entextsf/m/n --> TU/entextsf/m/n on input line 66.
+LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `normal'
+(Font) TU/lmtt/m/n --> TU/lmtt/m/n on input line 66.
+LaTeX Font Info: Overwriting symbol font `operators' in version `bold'
+(Font) TU/entextrm/m/n --> TU/entextrm/bx/n on input line 66.
+LaTeX Font Info: Overwriting math alphabet `\mathit' in version `bold'
+(Font) OT1/entextrm/b/it --> TU/entextrm/bx/it on input line 6
+6.
+LaTeX Font Info: Overwriting math alphabet `\mathsf' in version `bold'
+(Font) TU/entextsf/b/n --> TU/entextsf/bx/n on input line 66.
+LaTeX Font Info: Overwriting math alphabet `\mathtt' in version `bold'
+(Font) TU/lmtt/b/n --> TU/lmtt/bx/n on input line 66.
+Package caption Info: Begin \AtBeginDocument code.
+Package caption Info: float package is loaded.
+Package caption Info: hyperref package is loaded.
+Package caption Info: listings package is loaded.
+Package caption Info: End \AtBeginDocument code.
+\c@lstlisting=\count323
+\AtBeginShipoutBox=\box61
+Package hyperref Info: Link coloring ON on input line 66.
+
+(c:/texlive/2018/texmf-dist/tex/latex/hyperref/nameref.sty
+Package: nameref 2016/05/21 v2.44 Cross-referencing by name of section
+
+(c:/texlive/2018/texmf-dist/tex/generic/oberdiek/gettitlestring.sty
+Package: gettitlestring 2016/05/16 v1.5 Cleanup title references (HO)
+)
+\c@section@level=\count324
+)
+LaTeX Info: Redefining \ref on input line 66.
+LaTeX Info: Redefining \pageref on input line 66.
+LaTeX Info: Redefining \nameref on input line 66.
+
+(Tmp/Thesis.out) (Tmp/Thesis.out)
+\@outlinefile=\write3
+\openout3 = `Thesis.out'.
+
+ (./Tex/Frontmatter.tex
+File: ucas_logo.pdf Graphic file (type pdf)
+<use ucas_logo.pdf>
+\g__fontspec_family_SimHei_int=\count325
+.................................................
+. fontspec info: "no-font-shape"
+.
+. Could not resolve font "SimHei/I" (it probably doesn't exist).
+.................................................
+.................................................
+. fontspec info: "no-font-shape"
+.
+. Could not resolve font "SimHei/BI" (it probably doesn't exist).
+.................................................
+.................................................
+. fontspec info: "no-font-shape"
+.
+. Could not resolve font "SimHei/I" (it probably doesn't exist).
+.................................................
+.................................................
+. fontspec info: "defining-font"
+.
+. Font family 'SimHei(0)' created for font 'SimHei' with options
+. [Script={CJK},AutoFakeBold={4}].
+.
+. This font family consists of the following NFSS series/shapes:
+.
+. - 'normal' (m/n) with NFSS spec.: <->"SimHei/OT:script=hani;language=DFLT;"
+. - 'small caps' (m/sc) with NFSS spec.:
+. - 'bold' (bx/n) with NFSS spec.:
+. <->"SimHei/OT:script=hani;language=DFLT;embolden=4;"
+. - 'bold small caps' (bx/sc) with NFSS spec.:
+.................................................
+LaTeX Font Info: Try loading font information for U+msa on input line 32.
+
+(c:/texlive/2018/texmf-dist/tex/latex/amsfonts/umsa.fd
+File: umsa.fd 2013/01/14 v3.01 AMS symbols A
+)
+LaTeX Font Info: Try loading font information for U+msb on input line 32.
+
+(c:/texlive/2018/texmf-dist/tex/latex/amsfonts/umsb.fd
+File: umsb.fd 2013/01/14 v3.01 AMS symbols B
+)
+LaTeX Font Info: Try loading font information for U+ntxmia on input line 32.
+
+
+(c:/texlive/2018/texmf-dist/tex/latex/newtx/untxmia.fd
+File: untxmia.fd 2016/01/31 Fontinst v1.933 font definitions for U/ntxmia.
+)
+LaTeX Font Info: Try loading font information for U+ntxsym on input line 32.
+
+
+(c:/texlive/2018/texmf-dist/tex/latex/newtx/untxsym.fd
+File: untxsym.fd 2015/03/20 Fontinst v1.933 font definitions for U/ntxsym.
+)
+LaTeX Font Info: Try loading font information for U+ntxsyc on input line 32.
+
+
+(c:/texlive/2018/texmf-dist/tex/latex/newtx/untxsyc.fd
+File: untxsyc.fd 2012/04/12 Fontinst v1.933 font definitions for U/ntxsyc.
+)
+LaTeX Font Info: Try loading font information for U+rsfs on input line 32.
+
+(c:/texlive/2018/texmf-dist/tex/latex/jknapltx/ursfs.fd
+File: ursfs.fd 1998/03/24 rsfs font definition file (jk)
+)
+
+LaTeX Font Warning: Font shape `U/rsfs/m/n' in size <10.53937> not available
+(Font) size <10.95> substituted on input line 32.
+
+
+LaTeX Font Warning: Font shape `U/rsfs/m/n' in size <7.52812> not available
+(Font) size <8> substituted on input line 32.
+
+(c:/texlive/2018/texmf-dist/tex/generic/oberdiek/se-ascii-print.def
+File: se-ascii-print.def 2016/05/16 v1.11 stringenc: Printable ASCII characters
+
+) [1
+
+
+
+]
+[2
+
+
+] [3
+
+] [4
+
+
+] [5
+
+] [6
+
+
+] [1
+
+] [2
+
+]) [3] [4] (Tmp/Thesis.toc [5
+
+])
+\tf@toc=\write4
+\openout4 = `Thesis.toc'.
+
+ [6] (Tmp/Thesis.lof)
+\tf@lof=\write5
+\openout5 = `Thesis.lof'.
+
+
+[7
+
+] [8
+
+] (Tmp/Thesis.lot)
+\tf@lot=\write6
+\openout6 = `Thesis.lot'.
+
+ [9] [10
+
+] (./Tex/Mainmatter.tex (./Tex/Chap_1.tex
+第1章
+[1
+
+] [2]
+File: youtube.png Graphic file (type bmp)
+<youtube.png>
+ [3] [4]
+File: Content-based.png Graphic file (type bmp)
+<Content-based.png>
+ [5] [6] [7] [8]) (./Tex/Chap_2.tex [9] [10]
+第2章
+[11
+
+]
+File: Multi-threaded.jpg Graphic file (type bmp)
+<Multi-threaded.jpg>
+ [12] [13] [14]) (./Tex/Chap_3.tex [15] [16
+
+]
+第3章
+File: Typicaldecision.jpg Graphic file (type bmp)
+<Typicaldecision.jpg>
+[17] [18] [19]
+File: Flowchartofrequest_fi.png Graphic file (type bmp)
+<Flowchartofrequest_fi.png>
+File: Flowchartofrequest_ag.png Graphic file (type bmp)
+<Flowchartofrequest_ag.png>
+File: Frameworkforfeature.jpg Graphic file (type bmp)
+<Frameworkforfeature.jpg>
+ [20] [21] [22]
+File: Allaudio.png Graphic file (type bmp)
+<Allaudio.png>
+ [23] [24]
+File: 80sizedistribution.png Graphic file (type bmp)
+<80sizedistribution.png>
+ [25] [26] [27] [28]) (./Tex/Chap_4.tex
+[29] [30
+
+]
+第4章
+[31]
+File: Flowchartofpredictiveprogram.png Graphic file (type bmp)
+<Flowchartofpredictiveprogram.png>
+ [32]
+File: numberflow.png Graphic file (type bmp)
+<numberflow.png>
+ [33] [34]) (./Tex/Chap_5.tex [35] [36
+
+]
+第5章
+File: systemframework.png Graphic file (type bmp)
+<systemframework.png>
+File: test.png Graphic file (type bmp)
+<test.png>
+[37]
+File: result_a.png Graphic file (type bmp)
+<result_a.png>
+File: result_b.png Graphic file (type bmp)
+<result_b.png>
+File: result_c.png Graphic file (type bmp)
+<result_c.png>
+File: result_d.png Graphic file (type bmp)
+<result_d.png>
+ [38] [39] [40]) (./Tex/Chap_6.tex [41] [42
+
+]
+第6章
+[43])) [44] [45] [46
+
+] (Tmp/Thesis.bbl [47
+
+
+
+]
+Underfull \hbox (badness 1622) in paragraph at lines 163--169
+[]\TU/entextrm/m/n/10.53937 LI S, SCHLIEP M, HOPPER N. Facet: Streaming over V
+ideoconferencing for Censor-
+ []
+
+
+Underfull \hbox (badness 2452) in paragraph at lines 163--169
+\TU/entextrm/m/n/10.53937 ship Circumvention.[J/OL]. Wpes, 2014:163-172. [][]
+$[][][][] [] [] [] [][] [] [][][] [] [][][] [] [][][][][][][][] [] [][][] [] []
+[][][] []
+ []
+
+
+Underfull \hbox (badness 10000) in paragraph at lines 163--169
+[][][][][][][] [] [][][][][][][][][][][][][][][][] [] [] [] [][][] [] [][][] []
+ [][][] [] [][] [] [][][][] [] [][][][][][][] [] [][][][][][][][][][][][][][][]
+[][][][][] []
+ []
+
+[48]
+Underfull \hbox (badness 1895) in paragraph at lines 266--270
+[]\TU/entextrm/m/n/10.53937 SANDVINE. 2018-internet-phenomena-report[R/OL]. 2
+018. [][]$[][][][][] [] [] [] [][][] [] [][][][][][][][] [] [][][] []
+ []
+
+[49]) (./Tex/Backmatter.tex [50] [51
+
+] [52
+
+] [53] [54
+
+])
+Package atveryend Info: Empty hook `BeforeClearDocument' on input line 102.
+Package atveryend Info: Empty hook `AfterLastShipout' on input line 102.
+ (Tmp/Thesis.aux)
+Package atveryend Info: Empty hook `AtVeryEndDocument' on input line 102.
+Package atveryend Info: Executing hook `AtEndAfterFileList' on input line 102.
+Package rerunfilecheck Info: File `Thesis.out' has not changed.
+(rerunfilecheck) Checksum: F23E34B5E9AF2286626BB637E82675D3.
+
+
+LaTeX Font Warning: Size substitutions with differences
+(Font) up to 0.47188pt have occurred.
+
+
+LaTeX Warning: There were multiply-defined labels.
+
+ )
+Here is how much of TeX's memory you used:
+ 30200 strings out of 492970
+ 594967 string characters out of 6137298
+ 686656 words of memory out of 5000000
+ 33737 multiletter control sequences out of 15000+600000
+ 564218 words of font info for 159 fonts, out of 8000000 for 9000
+ 1348 hyphenation exceptions out of 8191
+ 60i,12n,97p,10526b,1120s stack positions out of 5000i,500n,10000p,200000b,80000s
+
+Output written on Tmp/Thesis.pdf (70 pages).
diff --git a/Tmp/Thesis.lot b/Tmp/Thesis.lot
new file mode 100644
index 0000000..6f3437d
--- /dev/null
+++ b/Tmp/Thesis.lot
@@ -0,0 +1,20 @@
+\addvspace {10.0pt}
+\contentsline {table}{\numberline {1.1}{\ignorespaces 网民人数以及音视频用户人数\relax }}{1}{table.caption.6}
+\contentsline {table}{\numberline {1.2}{\ignorespaces 不同应用网络流量占比\relax }}{2}{table.caption.7}
+\contentsline {table}{\numberline {1.3}{\ignorespaces 美国下载流量排行\relax }}{2}{table.caption.8}
+\contentsline {table}{\numberline {1.4}{\ignorespaces 国内外研究现状小结\relax }}{8}{table.caption.11}
+\addvspace {10.0pt}
+\addvspace {10.0pt}
+\contentsline {table}{\numberline {3.1}{\ignorespaces 数据集特征\relax }}{24}{table.caption.18}
+\contentsline {table}{\numberline {3.2}{\ignorespaces 所选特征和其特征量化方式\relax }}{26}{table.caption.20}
+\contentsline {table}{\numberline {3.3}{\ignorespaces 特征信息熵结果\relax }}{28}{table.caption.21}
+\contentsline {table}{\numberline {3.4}{\ignorespaces 特征互信息结果\relax }}{29}{table.caption.22}
+\addvspace {10.0pt}
+\contentsline {table}{\numberline {4.1}{\ignorespaces 误报率结果\relax }}{34}{table.caption.25}
+\contentsline {table}{\numberline {4.2}{\ignorespaces 漏报率结果\relax }}{34}{table.caption.26}
+\contentsline {table}{\numberline {4.3}{\ignorespaces 漏报率结果\relax }}{35}{table.caption.27}
+\addvspace {10.0pt}
+\contentsline {table}{\numberline {5.1}{\ignorespaces 内容分析模块特定音视频文件召回率\relax }}{38}{table.caption.30}
+\addvspace {10.0pt}
+\addvspace {10.0pt}
+\addvspace {10.0pt}
diff --git a/Tmp/Thesis.out b/Tmp/Thesis.out
new file mode 100644
index 0000000..8148b6e
--- /dev/null
+++ b/Tmp/Thesis.out
@@ -0,0 +1,66 @@
+\BOOKMARK [0][-]{chapter.1}{第1章 绪论}{}% 1
+\BOOKMARK [1][-]{section.1.1}{1.1 研究背景及意义}{chapter.1}% 2
+\BOOKMARK [1][-]{section.1.2}{1.2 系统要求}{chapter.1}% 3
+\BOOKMARK [1][-]{section.1.3}{1.3 国内外研究现状}{chapter.1}% 4
+\BOOKMARK [2][-]{subsection.1.3.1}{1.3.1 研究现状概述}{section.1.3}% 5
+\BOOKMARK [2][-]{subsection.1.3.2}{1.3.2 数据链路层的网络冗余发现系统}{section.1.3}% 6
+\BOOKMARK [2][-]{subsection.1.3.3}{1.3.3 网络层的网络冗余发现系统}{section.1.3}% 7
+\BOOKMARK [2][-]{subsection.1.3.4}{1.3.4 应用层的网络冗余发现系统}{section.1.3}% 8
+\BOOKMARK [2][-]{subsection.1.3.5}{1.3.5 国内外研究现状小结}{section.1.3}% 9
+\BOOKMARK [1][-]{section.1.4}{1.4 论文的主要内容与组织结构}{chapter.1}% 10
+\BOOKMARK [2][-]{subsection.1.4.1}{1.4.1 论文的主要内容}{section.1.4}% 11
+\BOOKMARK [2][-]{subsection.1.4.2}{1.4.2 组织结构}{section.1.4}% 12
+\BOOKMARK [1][-]{section.1.5}{1.5 小结}{chapter.1}% 13
+\BOOKMARK [0][-]{chapter.2}{第2章 前人的工作基础}{}% 14
+\BOOKMARK [1][-]{section.2.1}{2.1 使用流式模糊哈希的原因及其优点}{chapter.2}% 15
+\BOOKMARK [1][-]{section.2.2}{2.2 使用基于流式模糊哈希的相似性查找系统的原因及优点}{chapter.2}% 16
+\BOOKMARK [1][-]{section.2.3}{2.3 小结}{chapter.2}% 17
+\BOOKMARK [0][-]{chapter.3}{第3章 预测性文件标识生成方法}{}% 18
+\BOOKMARK [1][-]{section.3.1}{3.1 背景知识}{chapter.3}% 19
+\BOOKMARK [2][-]{subsection.3.1.1}{3.1.1 决策树}{section.3.1}% 20
+\BOOKMARK [2][-]{subsection.3.1.2}{3.1.2 贝叶斯分类方法}{section.3.1}% 21
+\BOOKMARK [2][-]{subsection.3.1.3}{3.1.3\040URL}{section.3.1}% 22
+\BOOKMARK [2][-]{subsection.3.1.4}{3.1.4 HTTP缓存}{section.3.1}% 23
+\BOOKMARK [2][-]{subsection.3.1.5}{3.1.5 特征选择}{section.3.1}% 24
+\BOOKMARK [2][-]{subsection.3.1.6}{3.1.6 信息熵}{section.3.1}% 25
+\BOOKMARK [2][-]{subsection.3.1.7}{3.1.7 互信息}{section.3.1}% 26
+\BOOKMARK [1][-]{section.3.2}{3.2 重复音视频文件预测实验}{chapter.3}% 27
+\BOOKMARK [2][-]{subsection.3.2.1}{3.2.1 实验思路}{section.3.2}% 28
+\BOOKMARK [2][-]{subsection.3.2.2}{3.2.2 机器学习算法选择}{section.3.2}% 29
+\BOOKMARK [2][-]{subsection.3.2.3}{3.2.3 实验数据}{section.3.2}% 30
+\BOOKMARK [2][-]{subsection.3.2.4}{3.2.4 数据标注}{section.3.2}% 31
+\BOOKMARK [2][-]{subsection.3.2.5}{3.2.5 机器学习库}{section.3.2}% 32
+\BOOKMARK [2][-]{subsection.3.2.6}{3.2.6 特征量化}{section.3.2}% 33
+\BOOKMARK [2][-]{subsection.3.2.7}{3.2.7 实验结果}{section.3.2}% 34
+\BOOKMARK [1][-]{section.3.3}{3.3 预测性文件标识生成}{chapter.3}% 35
+\BOOKMARK [2][-]{subsection.3.3.1}{3.3.1 步骤}{section.3.3}% 36
+\BOOKMARK [2][-]{subsection.3.3.2}{3.3.2 特征选择评价标准}{section.3.3}% 37
+\BOOKMARK [2][-]{subsection.3.3.3}{3.3.3 实验数据}{section.3.3}% 38
+\BOOKMARK [2][-]{subsection.3.3.4}{3.3.4 音视频特征的信息熵}{section.3.3}% 39
+\BOOKMARK [2][-]{subsection.3.3.5}{3.3.5 音视频特征的互信息}{section.3.3}% 40
+\BOOKMARK [1][-]{section.3.4}{3.4 小结}{chapter.3}% 41
+\BOOKMARK [0][-]{chapter.4}{第4章 基于流式模糊哈希的重复音视频检测方法}{}% 42
+\BOOKMARK [1][-]{section.4.1}{4.1 背景知识}{chapter.4}% 43
+\BOOKMARK [2][-]{subsection.4.1.1}{4.1.1 单向流}{section.4.1}% 44
+\BOOKMARK [2][-]{subsection.4.1.2}{4.1.2 流式模糊哈希相似不具有传递性}{section.4.1}% 45
+\BOOKMARK [1][-]{section.4.2}{4.2 基于流式模糊哈希的重复音视频检测方法}{chapter.4}% 46
+\BOOKMARK [2][-]{subsection.4.2.1}{4.2.1 步骤}{section.4.2}% 47
+\BOOKMARK [2][-]{subsection.4.2.2}{4.2.2 音视频文件编号}{section.4.2}% 48
+\BOOKMARK [2][-]{subsection.4.2.3}{4.2.3 预测性文件标识评价}{section.4.2}% 49
+\BOOKMARK [1][-]{section.4.3}{4.3 重复音视频检测方法的可行性验证}{chapter.4}% 50
+\BOOKMARK [2][-]{subsection.4.3.1}{4.3.1 预测性文件标识的误报率}{section.4.3}% 51
+\BOOKMARK [2][-]{subsection.4.3.2}{4.3.2 预测性文件标识的漏报率}{section.4.3}% 52
+\BOOKMARK [1][-]{section.4.4}{4.4 小结}{chapter.4}% 53
+\BOOKMARK [0][-]{chapter.5}{第5章 系统设计与实现}{}% 54
+\BOOKMARK [1][-]{section.5.1}{5.1 系统概述}{chapter.5}% 55
+\BOOKMARK [1][-]{section.5.2}{5.2 系统测试}{chapter.5}% 56
+\BOOKMARK [2][-]{subsection.5.2.1}{5.2.1 测试原理}{section.5.2}% 57
+\BOOKMARK [2][-]{subsection.5.2.2}{5.2.2 特定音视频文件召回率测试}{section.5.2}% 58
+\BOOKMARK [2][-]{subsection.5.2.3}{5.2.3 系统去重效果测试}{section.5.2}% 59
+\BOOKMARK [1][-]{section.5.3}{5.3 小结}{chapter.5}% 60
+\BOOKMARK [0][-]{chapter.6}{第6章 总结与展望}{}% 61
+\BOOKMARK [1][-]{section.6.1}{6.1 全文总结}{chapter.6}% 62
+\BOOKMARK [1][-]{section.6.2}{6.2 研究展望}{chapter.6}% 63
+\BOOKMARK [0][-]{section*.35}{参考文献}{}% 64
+\BOOKMARK [0][-]{appendix*.37}{致谢}{}% 65
+\BOOKMARK [0][-]{appendix*.38}{作者简历及攻读学位期间发表的学术论文与研究成果}{}% 66
diff --git a/Tmp/Thesis.toc b/Tmp/Thesis.toc
new file mode 100644
index 0000000..ea6e928
--- /dev/null
+++ b/Tmp/Thesis.toc
@@ -0,0 +1,66 @@
+\contentsline {chapter}{\numberline {第1章\hspace {.3em}}绪论}{1}{chapter.1}
+\contentsline {section}{\numberline {1.1}研究背景及意义}{1}{section.1.1}
+\contentsline {section}{\numberline {1.2}系统要求}{3}{section.1.2}
+\contentsline {section}{\numberline {1.3}国内外研究现状}{4}{section.1.3}
+\contentsline {subsection}{\numberline {1.3.1}研究现状概述}{4}{subsection.1.3.1}
+\contentsline {subsection}{\numberline {1.3.2}数据链路层的网络冗余发现系统}{5}{subsection.1.3.2}
+\contentsline {subsection}{\numberline {1.3.3}网络层的网络冗余发现系统}{5}{subsection.1.3.3}
+\contentsline {subsection}{\numberline {1.3.4}应用层的网络冗余发现系统}{6}{subsection.1.3.4}
+\contentsline {subsection}{\numberline {1.3.5}国内外研究现状小结}{7}{subsection.1.3.5}
+\contentsline {section}{\numberline {1.4}论文的主要内容与组织结构}{9}{section.1.4}
+\contentsline {subsection}{\numberline {1.4.1}论文的主要内容}{9}{subsection.1.4.1}
+\contentsline {subsection}{\numberline {1.4.2}组织结构}{9}{subsection.1.4.2}
+\contentsline {section}{\numberline {1.5}小结}{9}{section.1.5}
+\contentsline {chapter}{\numberline {第2章\hspace {.3em}}前人的工作基础}{11}{chapter.2}
+\contentsline {section}{\numberline {2.1}使用流式模糊哈希的原因及其优点}{11}{section.2.1}
+\contentsline {section}{\numberline {2.2}使用基于流式模糊哈希的相似性查找系统的原因及优点}{14}{section.2.2}
+\contentsline {section}{\numberline {2.3}小结}{15}{section.2.3}
+\contentsline {chapter}{\numberline {第3章\hspace {.3em}}预测性文件标识生成方法}{17}{chapter.3}
+\contentsline {section}{\numberline {3.1}背景知识}{17}{section.3.1}
+\contentsline {subsection}{\numberline {3.1.1}决策树}{17}{subsection.3.1.1}
+\contentsline {subsection}{\numberline {3.1.2}贝叶斯分类方法}{18}{subsection.3.1.2}
+\contentsline {subsection}{\numberline {3.1.3}URL}{19}{subsection.3.1.3}
+\contentsline {subsection}{\numberline {3.1.4}HTTP缓存}{19}{subsection.3.1.4}
+\contentsline {subsection}{\numberline {3.1.5}特征选择}{20}{subsection.3.1.5}
+\contentsline {subsection}{\numberline {3.1.6}信息熵}{22}{subsection.3.1.6}
+\contentsline {subsection}{\numberline {3.1.7}互信息}{22}{subsection.3.1.7}
+\contentsline {section}{\numberline {3.2}重复音视频文件预测实验}{23}{section.3.2}
+\contentsline {subsection}{\numberline {3.2.1}实验思路}{23}{subsection.3.2.1}
+\contentsline {subsection}{\numberline {3.2.2}机器学习算法选择}{23}{subsection.3.2.2}
+\contentsline {subsection}{\numberline {3.2.3}实验数据}{23}{subsection.3.2.3}
+\contentsline {subsection}{\numberline {3.2.4}数据标注}{25}{subsection.3.2.4}
+\contentsline {subsection}{\numberline {3.2.5}机器学习库}{25}{subsection.3.2.5}
+\contentsline {subsection}{\numberline {3.2.6}特征量化}{26}{subsection.3.2.6}
+\contentsline {subsection}{\numberline {3.2.7}实验结果}{26}{subsection.3.2.7}
+\contentsline {section}{\numberline {3.3}预测性文件标识生成}{27}{section.3.3}
+\contentsline {subsection}{\numberline {3.3.1}步骤}{27}{subsection.3.3.1}
+\contentsline {subsection}{\numberline {3.3.2}特征选择评价标准}{27}{subsection.3.3.2}
+\contentsline {subsection}{\numberline {3.3.3}实验数据}{27}{subsection.3.3.3}
+\contentsline {subsection}{\numberline {3.3.4}音视频特征的信息熵}{28}{subsection.3.3.4}
+\contentsline {subsection}{\numberline {3.3.5}音视频特征的互信息}{28}{subsection.3.3.5}
+\contentsline {section}{\numberline {3.4}小结}{29}{section.3.4}
+\contentsline {chapter}{\numberline {第4章\hspace {.3em}}基于流式模糊哈希的重复音视频检测方法}{31}{chapter.4}
+\contentsline {section}{\numberline {4.1}背景知识}{31}{section.4.1}
+\contentsline {subsection}{\numberline {4.1.1}单向流}{31}{subsection.4.1.1}
+\contentsline {subsection}{\numberline {4.1.2}流式模糊哈希相似不具有传递性}{31}{subsection.4.1.2}
+\contentsline {section}{\numberline {4.2}基于流式模糊哈希的重复音视频检测方法}{31}{section.4.2}
+\contentsline {subsection}{\numberline {4.2.1}步骤}{31}{subsection.4.2.1}
+\contentsline {subsection}{\numberline {4.2.2}音视频文件编号}{32}{subsection.4.2.2}
+\contentsline {subsection}{\numberline {4.2.3}预测性文件标识评价}{33}{subsection.4.2.3}
+\contentsline {section}{\numberline {4.3}重复音视频检测方法的可行性验证}{33}{section.4.3}
+\contentsline {subsection}{\numberline {4.3.1}预测性文件标识的误报率}{34}{subsection.4.3.1}
+\contentsline {subsection}{\numberline {4.3.2}预测性文件标识的漏报率}{34}{subsection.4.3.2}
+\contentsline {section}{\numberline {4.4}小结}{35}{section.4.4}
+\contentsline {chapter}{\numberline {第5章\hspace {.3em}}系统设计与实现}{37}{chapter.5}
+\contentsline {section}{\numberline {5.1}系统概述}{37}{section.5.1}
+\contentsline {section}{\numberline {5.2}系统测试}{37}{section.5.2}
+\contentsline {subsection}{\numberline {5.2.1}测试原理}{37}{subsection.5.2.1}
+\contentsline {subsection}{\numberline {5.2.2}特定音视频文件召回率测试}{38}{subsection.5.2.2}
+\contentsline {subsection}{\numberline {5.2.3}系统去重效果测试}{38}{subsection.5.2.3}
+\contentsline {section}{\numberline {5.3}小结}{41}{section.5.3}
+\contentsline {chapter}{\numberline {第6章\hspace {.3em}}总结与展望}{43}{chapter.6}
+\contentsline {section}{\numberline {6.1}全文总结}{43}{section.6.1}
+\contentsline {section}{\numberline {6.2}研究展望}{44}{section.6.2}
+\contentsline {chapter}{参考文献}{47}{section*.35}
+\contentsline {chapter}{致谢}{51}{appendix*.37}
+\contentsline {chapter}{作者简历及攻读学位期间发表的学术论文与研究成果}{53}{appendix*.38}
diff --git a/Tmp/陈冠林-硕士学位论文.pdf b/Tmp/陈冠林-硕士学位论文.pdf
new file mode 100644
index 0000000..312b075
--- /dev/null
+++ b/Tmp/陈冠林-硕士学位论文.pdf
Binary files differ
diff --git a/Tmp/陈冠林-硕士学位论文v1.1.pdf b/Tmp/陈冠林-硕士学位论文v1.1.pdf
new file mode 100644
index 0000000..2b6b82b
--- /dev/null
+++ b/Tmp/陈冠林-硕士学位论文v1.1.pdf
Binary files differ
diff --git a/Tmp/陈冠林-硕士毕业论文.pdf b/Tmp/陈冠林-硕士毕业论文.pdf
new file mode 100644
index 0000000..3f9d2e3
--- /dev/null
+++ b/Tmp/陈冠林-硕士毕业论文.pdf
Binary files differ
diff --git a/Tmp/陈冠林-硕士毕业论文v1.2.pdf b/Tmp/陈冠林-硕士毕业论文v1.2.pdf
new file mode 100644
index 0000000..d4a5e2d
--- /dev/null
+++ b/Tmp/陈冠林-硕士毕业论文v1.2.pdf
Binary files differ
diff --git a/Tmp/陈冠林-硕士毕业论文v1.3.pdf b/Tmp/陈冠林-硕士毕业论文v1.3.pdf
new file mode 100644
index 0000000..eeaf18d
--- /dev/null
+++ b/Tmp/陈冠林-硕士毕业论文v1.3.pdf
Binary files differ
diff --git a/Tmp/陈冠林-硕士毕业论文v1.4.pdf b/Tmp/陈冠林-硕士毕业论文v1.4.pdf
new file mode 100644
index 0000000..35ea7b0
--- /dev/null
+++ b/Tmp/陈冠林-硕士毕业论文v1.4.pdf
Binary files differ
diff --git a/Tmp/陈冠林-硕士毕业论文v1.5.pdf b/Tmp/陈冠林-硕士毕业论文v1.5.pdf
new file mode 100644
index 0000000..b756dfb
--- /dev/null
+++ b/Tmp/陈冠林-硕士毕业论文v1.5.pdf
Binary files differ
diff --git a/artratex.bat b/artratex.bat
new file mode 100644
index 0000000..25d7730
--- /dev/null
+++ b/artratex.bat
@@ -0,0 +1,56 @@
+@rem ------------------------------------------------
+@rem LaTeX Automated Compiler
+@rem <By Huangrui Mo>
+@rem Copyright (C) Huangrui Mo <[email protected]>
+@rem This is free software: you can redistribute it
+@rem and/or modify it under the terms of the GNU General
+@rem Public License as published by the Free Software
+@rem Foundation, either version 3 of the License, or
+@rem (at your option) any later version.
+@rem ------------------------------------------------
+@echo off
+@rem ------------------------------------------------
+@rem ->> Set tex compiler
+@rem ------------------------------------------------
+set CompilerOrder="2"
+@rem ------------------------------------------------
+if %CompilerOrder% == "1" (
+set CompileName="pdflatex"
+) else (
+set CompileName="xelatex"
+)
+@rem ------------------------------------------------
+@rem ->> Get source filename
+@rem ------------------------------------------------
+for %%F in (*.tex) do (
+set FileName=%%~nF
+)
+@rem ------------------------------------------------
+@rem ->> Set directory
+@rem ------------------------------------------------
+if not exist "./Tmp" (
+ md Tmp
+ )
+@rem ------------------------------------------------
+@rem ->> Set environmental variables
+@rem ------------------------------------------------
+set TEXINPUTS=.//;%TEXINPUTS%
+set BIBINPUTS=.//;%BIBINPUTS%
+set BSTINPUTS=.//;%BSTINPUTS%
+@rem ------------------------------------------------
+@rem ->> Build textual content
+@rem ------------------------------------------------
+%CompileName% -output-directory=Tmp %FileName%
+@rem ------------------------------------------------
+@rem ->> Build references and links
+@rem ------------------------------------------------
+bibtex ./Tmp/%FileName%
+%CompileName% -output-directory=Tmp %FileName%
+%CompileName% -output-directory=Tmp %FileName%
+@rem ------------------------------------------------
+@rem ->> View compiled file
+@rem ------------------------------------------------
+start " " /max "./Tmp/%FileName%.pdf"
+echo ------------------------------------------------
+echo %CompileName% %FileName%.tex finished...
+echo ------------------------------------------------
diff --git a/artratex.sh b/artratex.sh
new file mode 100644
index 0000000..9ea46c8
--- /dev/null
+++ b/artratex.sh
@@ -0,0 +1,110 @@
+#!/bin/bash
+set -e
+
+#---------------------------------------------------------------------------#
+#- LaTeX Automated Compiler -#
+#- <By Huangrui Mo> -#
+#- Copyright (C) Huangrui Mo <[email protected]> -#
+#- This is free software: you can redistribute it and/or modify it -#
+#- under the terms of the GNU General Public License as published by -#
+#- the Free Software Foundation, either version 3 of the License, or -#
+#- (at your option) any later version. -#
+#---------------------------------------------------------------------------#
+
+#---------------------------------------------------------------------------#
+#->> Preprocessing
+#---------------------------------------------------------------------------#
+#-
+#-> Get source filename
+#-
+if [[ "$#" == "1" ]]; then
+ FileName=`echo *.tex`
+elif [[ "$#" == "2" ]]; then
+ FileName="$2"
+else
+ echo "---------------------------------------------------------------------------"
+ echo "Usage: "$0" <l|p|x>< |a|b> <filename>"
+ echo "TeX engine parameters: <l:lualatex>, <p:pdflatex>, <x:xelatex>"
+ echo "Bib engine parameters: < :none>, <a:bibtex>, <b:biber>"
+ echo "---------------------------------------------------------------------------"
+ exit
+fi
+FileName=${FileName/.tex}
+#-
+#-> Get tex compiler
+#-
+if [[ $1 == *'l'* ]]; then
+ TexCompiler="lualatex"
+else
+ if [[ $1 == *'p'* ]]; then
+ TexCompiler="pdflatex"
+ else
+ TexCompiler="xelatex"
+ fi
+fi
+#-
+#-> Get bib compiler
+#-
+if [[ $1 == *'a'* ]]; then
+ BibCompiler="bibtex"
+elif [[ $1 == *'b'* ]]; then
+ BibCompiler="biber"
+else
+ BibCompiler=""
+fi
+#-
+#-> Set compilation out directory resembling the inclusion hierarchy
+#-
+Tmp="Tmp"
+Tex="Tex"
+if [[ ! -d $Tmp/$Tex ]]; then
+ mkdir -p $Tmp/$Tex
+fi
+#-
+#-> Set LaTeX environmental variables to add subdirs into search path
+#-
+export TEXINPUTS=".//:$TEXINPUTS" # paths to locate .tex
+export BIBINPUTS=".//:$BIBINPUTS" # paths to locate .bib
+export BSTINPUTS=".//:$BSTINPUTS" # paths to locate .bst
+#---------------------------------------------------------------------------#
+#->> Compiling
+#---------------------------------------------------------------------------#
+#-
+#-> Build textual content and auxiliary files
+#-
+$TexCompiler -output-directory=$Tmp $FileName || exit
+#-
+#-> Build references and links
+#-
+if [[ -n $BibCompiler ]]; then
+ #- fix the inclusion path for hierarchical auxiliary files
+ sed -i -e "s|\@input{|\@input{$Tmp/|g" $Tmp/"$FileName".aux
+ #- extract and format bibliography database via auxiliary files
+ $BibCompiler $Tmp/$FileName
+ #- insert reference indicators into textual content
+ $TexCompiler -output-directory=$Tmp $FileName || exit
+ #- refine citation references and links
+ $TexCompiler -output-directory=$Tmp $FileName || exit
+fi
+#---------------------------------------------------------------------------#
+#->> Postprocessing
+#---------------------------------------------------------------------------#
+#-
+#-> Set PDF viewer
+#-
+System_Name=`uname`
+if [[ $System_Name == "Linux" ]]; then
+ PDFviewer="xdg-open"
+elif [[ $System_Name == "Darwin" ]]; then
+ PDFviewer="open"
+else
+ PDFviewer="open"
+fi
+#-
+#-> Open the compiled file
+#-
+$PDFviewer ./$Tmp/"$FileName".pdf || exit
+echo "---------------------------------------------------------------------------"
+echo "$TexCompiler $BibCompiler "$FileName".tex finished..."
+echo "---------------------------------------------------------------------------"
+
diff --git a/模板使用说明.pdf b/模板使用说明.pdf
new file mode 100644
index 0000000..dd273ed
--- /dev/null
+++ b/模板使用说明.pdf
Binary files differ