uoega / Zero Shot Action Recognition / Commits / 8ed37647

Commit 8ed37647, authored Jul 25, 2021 by uoega
latex paper update
parent 906df2aa
Changes: 9 files

LearningToCompare_ZSL/classification_MLP/Classification_40.py
@@ -7,6 +7,37 @@ import torch.optim as optim
import torch.nn.functional as F
from torch.optim.lr_scheduler import StepLR

"""
batch_size = 5
seq_len = 10
embed_dim = 2
value_dim = 12
num_heads = 1
query = torch.randn(seq_len, batch_size, embed_dim)
key = torch.randn(seq_len, batch_size, embed_dim)
value = torch.randn(seq_len, batch_size, embed_dim)
mha = nn.MultiheadAttention(embed_dim, num_heads)
attn_out, _ = mha(query, key, value)
attn_out = attn_out.permute(1, 0, 2)

m = nn.BatchNorm1d(3, affine=False)
input = torch.tensor([[1., 2., 3.], [1., 2., 3.], [1., 2., 3.], [1., 2., 3.]])  # batchsize, channels = 4, 3
print(input.shape)
print(input)
output = m(input)
print(output)

m = nn.BatchNorm1d(3, affine=False)
input = torch.tensor([[1., 1., 1.], [2., 2., 2.], [3., 3., 3.], [4., 4., 4.]])  # batchsize, channels = 4, 3
print(input.shape)
print(input)
output = m(input)
print(output)
exit()
"""

class Net(nn.Module):
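
For reference, a minimal standalone sketch (not part of the commit) of what the commented-out BatchNorm1d check above would print, assuming PyTorch's default eps=1e-5:

# Hypothetical sanity check, mirroring the block commented out above.
import torch
import torch.nn as nn

m = nn.BatchNorm1d(3, affine=False)
# All rows identical -> every channel is constant over the batch -> output is all zeros.
print(m(torch.tensor([[1., 2., 3.]] * 4)))
# Each channel is [1, 2, 3, 4] -> mean 2.5, biased variance 1.25,
# so every column becomes approximately [-1.3416, -0.4472, 0.4472, 1.3416].
print(m(torch.tensor([[1., 1., 1.], [2., 2., 2.], [3., 3., 3.], [4., 4., 4.]])))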

@@ -14,8 +45,10 @@ class Net(nn.Module):
        super().__init__()
        self.fc1 = nn.Linear(256, 256)
        self.fc2 = nn.Linear(256, 40)
        self.bn = nn.BatchNorm1d(256, affine=True)

    def forward(self, x):
        x = self.bn(x)
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        return x

@@ -23,15 +56,71 @@ class Net(nn.Module):
class Net2(nn.Module):
    def __init__(self):
        super().__init__()
        self.bn = nn.BatchNorm1d(256, affine=False)
        self.fc1 = nn.Linear(256, 40)

    def forward(self, x):
        x = self.bn(x)
        x = self.fc1(x)
        return x

class Net_attention(nn.Module):
    def __init__(self):
        super().__init__()
        self.self_attn = nn.MultiheadAttention(1, 1)
        self.fc1 = nn.Linear(256, 40)

    def forward(self, x):
        x = x.view(-1, 256, 1)            # batchsize, features, embedding_size
        x = x.permute(1, 0, 2)            # features, batchsize, embedding_size
        x, _ = self.self_attn(x, x, x)    # features, batchsize, embedding_size
        x = x.permute(1, 0, 2)            # batchsize, features, embedding_size
        x = x.view(-1, 256)               # batchsize, features
        x = self.fc1(x)
        return x

class Net_attention2(nn.Module):
    def __init__(self):
        super().__init__()
        self.self_attn = nn.MultiheadAttention(256, 1, dropout=0.5)
        self.fc1 = nn.Linear(256, 40)

    def forward(self, x):
        x = x.view(-1, 1, 256)            # batchsize, features, embedding_size
        x = x.permute(1, 0, 2)            # features, batchsize, embedding_size
        x, _ = self.self_attn(x, x, x)    # features, batchsize, embedding_size
        x = x.permute(1, 0, 2)            # batchsize, features, embedding_size
        x = x.view(-1, 256)               # batchsize, features
        x = self.fc1(x)
        return x

class Net_attention3(nn.Module):
    def __init__(self):
        super().__init__()
        self.self_attn = nn.MultiheadAttention(64, 1)
        self.fc1 = nn.Linear(256, 40)

    def forward(self, x):
        x = x.view(-1, 4, 64)             # batchsize, features, embedding_size
        x = x.permute(1, 0, 2)            # features, batchsize, embedding_size
        x, _ = self.self_attn(x, x, x)    # features, batchsize, embedding_size
        x = x.permute(1, 0, 2)            # batchsize, features, embedding_size
        x = torch.flatten(x, start_dim=1) # batchsize, features
        # print(x.shape)
        x = self.fc1(x)
        return x

def main():
    # init weights with seed
    print("init seed")
    seed = 23
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    np.random.seed(seed)

    # init CUDA
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    print(device)
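
A minimal shape check for the three attention variants above (an illustrative sketch, not part of the commit, assuming the imports at the top of Classification_40.py): each variant maps a (batch, 256) feature tensor to (batch, 40) class scores and differs only in how the 256 features are split into tokens for nn.MultiheadAttention (256 tokens of size 1, one token of size 256, or 4 tokens of size 64).

# Hypothetical check, run after the class definitions above.
x = torch.randn(8, 256)                       # a batch of 8 feature vectors
for model in (Net_attention(), Net_attention2(), Net_attention3()):
    model.eval()                              # disable dropout in Net_attention2
    print(model(x).shape)                     # torch.Size([8, 40]) for every variant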

@@ -71,7 +160,7 @@ def main():
    train_data = TensorDataset(train_features, train_label)
    val_data = TensorDataset(val_features, val_label)
-   trainloader = DataLoader(train_data, batch_size=256, shuffle=True)
+   trainloader = DataLoader(train_data, batch_size=128, shuffle=True)
    testloader = DataLoader(val_data, batch_size=32, shuffle=False)
    # load net for validation

@@ -83,22 +172,26 @@ def main():
    net = Net().to(device)
    criterion = nn.CrossEntropyLoss().to(device)
    optimizer = optim.Adam(net.parameters(), lr=0.01)
    # optimizer = optim.Adam(net.parameters(), lr=0.0001)
    # scheduler = StepLR(optimizer, step_size=500, gamma=0.5)
-   # optimizer = torch.optim.Adagrad(net.parameters(), lr=0.01, lr_decay=0.0, weight_decay=0, initial_accumulator_value=0, eps=1e-10)
+   optimizer = torch.optim.Adagrad(net.parameters(), lr=0.01, lr_decay=0.0, weight_decay=0, initial_accumulator_value=0, eps=1e-10)
    # optimizer = torch.optim.Adadelta(net.parameters(), lr=1.0, rho=0.9, eps=1e-06, weight_decay=0)

    class_accuracy = 0
    print("Dimension of trainable parameters: ")
    for parameter in net.parameters():
        print(parameter.shape)
        # print(parameter)

    print("start training...")
    for epoch in range(2000):  # loop over the dataset multiple times
        net.train()
        running_loss = 0.0
        for i, data in enumerate(trainloader):
            # get the inputs; data is a list of [inputs, labels]
            inputs, labels = data

            # zero the parameter gradients
            optimizer.zero_grad()

@@ -112,9 +205,12 @@ def main():
            # print statistics
        if epoch % 10 == 9:  # print every 10 epochs
            print('[%d] loss: %.3f' % (epoch + 1, loss.item()))
            # for parameter in net.parameters():
            #     print(parameter)
-       if epoch % 100 == 99:
+       if epoch % 10 == 9:
            net.eval()
            correct = 0
            total = 0
            # since we're not training, we don't need to calculate the gradients for our outputs

@@ -130,7 +226,7 @@ def main():
            print('Accuracy: %.2f %%' % (100 * correct / total))
-       if epoch % 500 == 499:
+       if epoch % 100 == 99:
            # prepare to count predictions for each class
            correct_pred = np.zeros(40)
            total_pred = np.zeros(40)
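
A sketch of how the per-class counters prepared above are typically filled and reported (an assumption, since the rest of this hunk is not shown in the diff):

# Hypothetical continuation inside main(), using the counters and loaders defined above.
with torch.no_grad():
    for inputs, labels in testloader:
        outputs = net(inputs.to(device))
        predictions = outputs.argmax(dim=1).cpu()
        for label, prediction in zip(labels.tolist(), predictions.tolist()):
            total_pred[label] += 1
            if prediction == label:
                correct_pred[label] += 1
class_accuracy = 100 * correct_pred / np.maximum(total_pred, 1)  # guard against empty classes
print(class_accuracy)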

Template/latex/Architektur2.PNG
new file (mode 0 → 100644), 190 KB

Template/latex/egbib.bib

@@ -44,11 +44,34 @@
  title = {Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks},
  author = {Nils Reimers and Iryna Gurevych},
  year = {2019},
- jornal = {arXiv:1908.10084},
+ journal = {arXiv:1908.10084},
  archivePrefix = {arXiv},
  primaryClass = {cs.CL}
}

@article{duan2021revisiting,
  title = {Revisiting Skeleton-based Action Recognition},
  author = {Haodong Duan and Yue Zhao and Kai Chen and Dian Shao and Dahua Lin and Bo Dai},
  year = {2021},
  journal = {arXiv:2104.13586},
  archivePrefix = {arXiv},
  primaryClass = {cs.CV}
}

@article{Liu_2020,
  title = {NTU RGB+D 120: A Large-Scale Benchmark for 3D Human Activity Understanding},
  volume = {42},
  ISSN = {1939-3539},
  url = {http://dx.doi.org/10.1109/TPAMI.2019.2916873},
  DOI = {10.1109/tpami.2019.2916873},
  number = {10},
  journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  publisher = {Institute of Electrical and Electronics Engineers (IEEE)},
  author = {Liu, Jun and Shahroudy, Amir and Perez, Mauricio and Wang, Gang and Duan, Ling-Yu and Kot, Alex C.},
  year = {2020},
  month = {Oct},
  pages = {2684–2701}
}

@misc{Authors14,
  author = {Authors},
  ...

Template/latex/paper_working_design.aux

@@ -16,43 +16,54 @@
\gdef\HyperFirstAtBeginDocument#1{#1}
\providecommand\HyField@AuxAddToFields[1]{}
\providecommand\HyField@AuxAddToCoFields[2]{}
\citation{duan2021revisiting}
\citation{Liu_2020}
\citation{jasani2019skeleton}
\citation{reimers2019sentencebert}
\@writefile{toc}{\contentsline {section}{\numberline {1}\hskip -1em.\nobreakspace {}Introduction}{1}{section.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {1.1}\hskip -1em.\nobreakspace {}Zero-shot learning}{1}{subsection.1.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {1.2}\hskip -1em.\nobreakspace {}Skeleton-based visual recognition}{1}{subsection.1.2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {1.3}\hskip -1em.\nobreakspace {}Data augmentation}{1}{subsection.1.3}}
\@writefile{toc}{\contentsline {subsection}{\numberline {1.3}\hskip -1em.\nobreakspace {}Related work}{1}{subsection.1.3}}
\@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces Architecture of the network.}}{2}{figure.1}}
\newlabel{fig:long}{{1}{2}{Architecture of the network}{figure.1}{}}
\newlabel{fig:onecol}{{1}{2}{Architecture of the network}{figure.1}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {1.4}\hskip -1em.\nobreakspace {}Data augmentation}{2}{subsection.1.4}}
\@writefile{toc}{\contentsline {section}{\numberline {2}\hskip -1em.\nobreakspace {}Method}{2}{section.2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1}\hskip -1em.\nobreakspace {}Architecture}{2}{subsection.2.1}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.1}Visual path}{2}{subsubsection.2.1.1}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.2}Semantic Path}{2}{subsubsection.2.1.2}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.3}Similarity-Learning-Part}{2}{subsubsection.2.1.3}}
\citation{liu2019roberta}
\citation{ma2019nlpaug}
\@writefile{toc}{\contentsline {section}{\numberline {2}\hskip -1em.\nobreakspace {}Method}{2}{section.2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1}\hskip -1em.\nobreakspace {}Augmentations}{2}{subsection.2.1}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.1.1}Automatic Augmentation}{2}{subsubsection.2.1.1}}
\citation{jasani2019skeleton}
\citation{sung2018learning}
\@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces Architecture or other needed for method}}{3}{figure.1}}
\newlabel{fig:long}{{1}{3}{Architecture or other needed for method}{figure.1}{}}
\newlabel{fig:onecol}{{1}{3}{Architecture or other needed for method}{figure.1}{}}
\@writefile{lot}{\contentsline {table}{\numberline {1}{\ignorespaces Unseen top-1 and top-5 accuracies results in detail.}}{3}{table.1}}
\@writefile{lof}{\contentsline {figure}{\numberline {2}{\ignorespaces aug example1}}{3}{figure.2}}
\newlabel{fig:long}{{2}{3}{aug example1}{figure.2}{}}
\newlabel{fig:onecol}{{2}{3}{aug example1}{figure.2}{}}
\@writefile{lof}{\contentsline {figure}{\numberline {3}{\ignorespaces aug example2}}{3}{figure.3}}
\newlabel{fig:long}{{3}{3}{aug example2}{figure.3}{}}
\newlabel{fig:onecol}{{3}{3}{aug example2}{figure.3}{}}
\@writefile{lot}{\contentsline {table}{\numberline {2}{\ignorespaces ZSL and GZSL results for different approaches.}}{3}{table.2}}
\newlabel{tab:ZSL_GZSL}{{2}{3}{ZSL and GZSL results for different approaches}{table.2}{}}
\@writefile{lot}{\contentsline {table}{\numberline {3}{\ignorespaces Unseen top-1 and top-5 accuracies results in detail.}}{3}{table.3}}
\newlabel{tab:top1_top5}{{3}{3}{Unseen top-1 and top-5 accuracies results in detail}{table.3}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2}\hskip -1em.\nobreakspace {}Experiments}{3}{subsection.2.2}}
\@writefile{lot}{\contentsline {table}{\numberline {1}{\ignorespaces Three descriptive labels for class "Squat down".}}{3}{table.1}}
\newlabel{tab:multi_label}{{1}{3}{Three descriptive labels for class "Squat down"}{table.1}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2}\hskip -1em.\nobreakspace {}Augmentation}{3}{subsection.2.2}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.1}Descriptive labels}{3}{subsubsection.2.2.1}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.2}Multiple labels per class}{3}{subsubsection.2.2.2}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.3}Automatic augmentation}{3}{subsubsection.2.2.3}}
\@writefile{lot}{\contentsline {table}{\numberline {2}{\ignorespaces Descriptive label and two automatic augmentations for "Squat down".}}{3}{table.2}}
\newlabel{tab:auto_aug}{{2}{3}{Descriptive label and two automatic augmentations for "Squat down"}{table.2}{}}
\@writefile{lot}{\contentsline {table}{\numberline {3}{\ignorespaces ZSL and GZSL results for different approaches.}}{3}{table.3}}
\newlabel{tab:ZSL_GZSL}{{3}{3}{ZSL and GZSL results for different approaches}{table.3}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.3}\hskip -1em.\nobreakspace {}Experiments}{3}{subsection.2.3}}
\citation{jasani2019skeleton}
\citation{ma2019nlpaug}
\bibstyle{ieee_fullname}
\bibdata{egbib}
\bibcite{jasani2019skeleton}{1}
\@writefile{lot}{\contentsline {table}{\numberline {4}{\ignorespaces Unseen top-1 and top-5 accuracies in detail.}}{4}{table.4}}
\newlabel{tab:top1_top5}{{4}{4}{Unseen top-1 and top-5 accuracies in detail}{table.4}{}}
\@writefile{toc}{\contentsline {section}{\numberline {3}\hskip -1em.\nobreakspace {}Results}{4}{section.3}}
\@writefile{toc}{\contentsline {subsection}{\numberline {3.1}\hskip -1em.\nobreakspace {}Discussion}{4}{subsection.3.1}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.1.1}From default to descriptive labels}{4}{subsubsection.3.1.1}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.1.2}Using multiple labels}{4}{subsubsection.3.1.2}}
\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.1.3}Automatic augmentation}{4}{subsubsection.3.1.3}}
\@writefile{toc}{\contentsline {section}{\numberline {4}\hskip -1em.\nobreakspace {}Conclusion}{4}{section.4}}
\bibcite{liu2019roberta}{2}
\bibcite{ma2019nlpaug}{3}
\bibcite{sung2018learning}{4}
\bibstyle{ieee_fullname}
\bibdata{egbib}
\bibcite{duan2021revisiting}{1}
\bibcite{jasani2019skeleton}{2}
\bibcite{Liu_2020}{3}
\bibcite{liu2019roberta}{4}
\bibcite{ma2019nlpaug}{5}
\bibcite{reimers2019sentencebert}{6}
\bibcite{sung2018learning}{7}

Template/latex/paper_working_design.bbl

\begin{thebibliography}{1}\itemsep=-1pt
\bibitem{duan2021revisiting}
Haodong Duan, Yue Zhao, Kai Chen, Dian Shao, Dahua Lin, and Bo Dai.
\newblock Revisiting skeleton-based action recognition.
\newblock {\em arXiv:2104.13586}, 2021.
\bibitem{jasani2019skeleton}
Bhavan Jasani and Afshaan Mazagonwalla.
\newblock Skeleton based zero shot action recognition in joint pose-language
semantic space.
\newblock {\em arXiv:1911.11344}, 2019.
\bibitem{Liu_2020}
Jun Liu, Amir Shahroudy, Mauricio Perez, Gang Wang, Ling-Yu Duan, and Alex~C.
Kot.
\newblock Ntu rgb+d 120: A large-scale benchmark for 3d human activity
understanding.
\newblock {\em IEEE Transactions on Pattern Analysis and Machine Intelligence},
42(10):2684–2701, Oct 2020.
\bibitem{liu2019roberta}
Yinhan Liu, Myle Ott, Naman Goyal, Jingfei Du, Mandar Joshi, Danqi Chen, Omer
Levy, Mike Lewis, Luke Zettlemoyer, and Veselin Stoyanov.
@@ -17,6 +30,11 @@ Edward Ma.
\newblock Nlp augmentation.
\newblock https://github.com/makcedward/nlpaug, 2019.
\bibitem{reimers2019sentencebert}
Nils Reimers and Iryna Gurevych.
\newblock Sentence-bert: Sentence embeddings using siamese bert-networks.
\newblock {\em arXiv:1908.10084}, 2019.
\bibitem{sung2018learning}
Flood Sung, Yongxin Yang, Li Zhang, Tao Xiang, Philip H.~S. Torr, and
Timothy~M. Hospedales.
...

Template/latex/paper_working_design.log

- This is pdfTeX, Version 3.14159265-2.6-1.40.19 (MiKTeX 2.9.6840 64-bit) (preloaded format=pdflatex 2018.10.16) 24 JUL 2021 22:08
+ This is pdfTeX, Version 3.14159265-2.6-1.40.19 (MiKTeX 2.9.6840 64-bit) (preloaded format=pdflatex 2018.10.16) 25 JUL 2021 15:41
entering extended mode
**./paper_working_design.tex
(paper_working_design.tex
@@ -285,20 +285,7 @@ Package uniquecounter Info: New unique counter `rerunfilecheck' on input line 2
82.
)
\Hy@SectionHShift=\skip48
) (paper_working_design.aux
LaTeX Warning: Label `fig:long' multiply defined.
LaTeX Warning: Label `fig:onecol' multiply defined.
LaTeX Warning: Label `fig:long' multiply defined.
LaTeX Warning: Label `fig:onecol' multiply defined.
)
) (paper_working_design.aux)
\openout1 = `paper_working_design.aux'.
LaTeX Font Info: Checking defaults for OML/cmm/m/it on input line 25.
@@ -316,7 +303,8 @@ LaTeX Font Info: ... okay on input line 25.
LaTeX Font Info: Checking defaults for PD1/pdf/m/n on input line 25.
LaTeX Font Info: ... okay on input line 25.
LaTeX Font Info: Try loading font information for OT1+ptm on input line 25.
("C:\Users\XPS15\AppData\Local\Programs\MiKTeX 2.9\tex\latex\psnfss\ot1ptm.fd"
("C:\Users\XPS15\AppData\Local\Programs\MiKTeX 2.9\tex\latex\psnfss\ot1ptm.fd"
File: ot1ptm.fd 2001/06/04 font definitions for OT1/ptm.
) ABD: EveryShipout initializing macros
("C:\Users\XPS15\AppData\Local\Programs\MiKTeX 2.9\tex\latex\graphics\color.sty
@@ -393,136 +381,65 @@ File: ot1pcr.fd 2001/06/04 font definitions for OT1/pcr.
)
LaTeX Font Info: Font shape `OT1/ptm/bx/n' in size <12> not available
(Font) Font shape `OT1/ptm/b/n' tried instead on input line 52.
[1{C:/Users/XPS15/AppData/Local/MiKTeX/2.9/pdftex/config/pdftex.map}
]
Underfull \hbox (badness 4859) in paragraph at lines 78--79
[]\OT1/ptm/m/n/10 Die gew[]ahlte Ar-chitek-tur f[]ur un-sere Ex-per-i-mente
[]
Underfull \hbox (badness 10000) in paragraph at lines 72--75
Underfull \hbox (badness 1038) in paragraph at lines 78--79
\OT1/ptm/m/n/10 seinen einzel-nen Mod-ulen zusam-menge-baut. Einzelne
[]
[1{C:/Users/XPS15/AppData/Local/MiKTeX/2.9/pdftex/config/pdftex.map}
Underfull \hbox (badness 10000) in paragraph at lines 78--79
[]
]
<Architektur2.png, id=24, 885.6839pt x 440.77171pt>
File: Architektur2.png Graphic file (type png)
<use Architektur2.png>
Package pdftex.def Info: Architektur2.png used on input line 87.
(pdftex.def) Requested size: 213.4209pt x 106.21107pt.
LaTeX Font Info: Font shape `OT1/ptm/bx/n' in size <10> not available
(Font) Font shape `OT1/ptm/b/n' tried instead on input line 82.
Underfull \hbox (badness 10000) in paragraph at lines 80--85
[]
Underfull \hbox (badness 10000) in paragraph at lines 87--92
[]
Underfull \hbox (badness 1062) in paragraph at lines 94--100
\OT1/ptm/m/n/10 ab-u-lar, d.h. alle m[]oglichen Klassen-la-bels, in ein se-
[]
Underfull \hbox (badness 1127) in paragraph at lines 94--100
\OT1/ptm/m/n/10 die Ab-bil-dung der se-man-tis-chen Merk-male in den vi-
[]
Underfull \hbox (badness 1168) in paragraph at lines 94--100
\OT1/ptm/m/n/10 Net (RN), das im fol-gen-den Ab-schnitt n[]aher erl[]autert
[]
<Architektur.png, id=16, 817.527pt x 418.509pt>
File: Architektur.png Graphic file (type png)
<use Architektur.png>
Package pdftex.def Info: Architektur.png used on input line 104.
(pdftex.def) Requested size: 189.70947pt x 97.11714pt.
Underfull \hbox (badness 4181) in paragraph at lines 113--114
(Font) Font shape `OT1/ptm/b/n' tried instead on input line 98.
[2 <./Architektur2.png>]
Underfull \hbox (badness 4181) in paragraph at lines 137--139
\OT1/ptm/m/n/10 To re-duce the man-ual an-no-ta-tion ef-fort, we would
[]
- Underfull \hbox (badness 6477) in paragraph at lines 113--114
+ Underfull \hbox (badness 6477) in paragraph at lines 137--139
\OT1/ptm/m/n/10 like to gen-er-ate ad-di-tional la-bels au-to-mat-i-cally for
[]
- Underfull \hbox (badness 1888) in paragraph at lines 113--114
+ Underfull \hbox (badness 1888) in paragraph at lines 137--139
\OT1/ptm/m/n/10 the multi la-bel ap-proach. There-for we're us-ing the
[]
Underfull \hbox (badness 10000) in paragraph at lines 113--114
[]
[2]
Underfull \hbox (badness 10000) in paragraph at lines 115--119
[]
Underfull \hbox (badness 10000) in paragraph at lines 121--125
[]
Overfull \hbox (16.13214pt too wide) in paragraph at lines 129--139
[]
[]
<aug_example1.png, id=27, 1195.08984pt x 126.4725pt>
File: aug_example1.png Graphic file (type png)
<use aug_example1.png>
Package pdftex.def Info: aug_example1.png used on input line 146.
(pdftex.def) Requested size: 213.4209pt x 22.58458pt.
<aug_example2.png, id=28, 888.31876pt x 242.78203pt>
File: aug_example2.png Graphic file (type png)
<use aug_example2.png>
Package pdftex.def Info: aug_example2.png used on input line 156.
(pdftex.def) Requested size: 213.4209pt x 58.32814pt.
Underfull \vbox (badness 5711) has occurred while \output is active []
[3 <./Architektur.png> <./aug_example1.png> <./aug_example2.png>]
Underfull \vbox (badness 6412) has occurred while \output is active []
Underfull \vbox (badness 10000) has occurred while \output is active []
(paper_working_design.bbl [4]
Underfull \hbox (badness 10000) in paragraph at lines 16--19
[3]
[4] (paper_working_design.bbl
Underfull \hbox (badness 10000) in paragraph at lines 29--32
[]\OT1/ptm/m/n/9 Edward Ma. Nlp aug-men-ta-tion.
[]
)
- Package atveryend Info: Empty hook `BeforeClearDocument' on input line 239.
+ Package atveryend Info: Empty hook `BeforeClearDocument' on input line 235.
[5
]
- Package atveryend Info: Empty hook `AfterLastShipout' on input line 239.
+ Package atveryend Info: Empty hook `AfterLastShipout' on input line 235.
(paper_working_design.aux)
Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 239.
Package atveryend Info: Empty hook `AtEndAfterFileList' on input line 239.
LaTeX Warning: There were multiply-defined labels.
Package atveryend Info: Empty hook `AtVeryVeryEnd' on input line 239.
Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 235.
Package atveryend Info: Empty hook `AtEndAfterFileList' on input line 235.
Package atveryend Info: Empty hook `AtVeryVeryEnd' on input line 235.
)
Here is how much of TeX's memory you used:
6261 strings out of 492970
- 92074 string characters out of 3126593
- 189874 words of memory out of 3000000
- 10014 multiletter control sequences out of 15000+200000
+ 92105 string characters out of 3126593
+ 192127 words of memory out of 3000000
+ 10007 multiletter control sequences out of 15000+200000
29095 words of font info for 69 fonts, out of 3000000 for 9000
1141 hyphenation exceptions out of 8191
- 32i,13n,27p,1165b,468s stack positions out of 5000i,500n,10000p,200000b,50000s
+ 32i,13n,27p,1294b,324s stack positions out of 5000i,500n,10000p,200000b,50000s
{C:/Users/XPS15/AppData/Local/Programs/MiKTeX 2.9/fonts/enc/dvips/base/8r.enc
}<C:/Users/XPS15/AppData/Local/Programs/MiKTeX 2.9/fonts/type1/public/amsfonts/
cm/cmmi10.pfb><C:/Users/XPS15/AppData/Local/Programs/MiKTeX 2.9/fonts/type1/pub
@@ -532,9 +449,9 @@ iKTeX 2.9/fonts/type1/urw/courier/ucrr8a.pfb><C:/Users/XPS15/AppData/Local/Prog
rams/MiKTeX 2.9/fonts/type1/urw/times/utmb8a.pfb><C:/Users/XPS15/AppData/Local/
Programs/MiKTeX 2.9/fonts/type1/urw/times/utmr8a.pfb><C:/Users/XPS15/AppData/Lo
cal/Programs/MiKTeX 2.9/fonts/type1/urw/times/utmri8a.pfb>
- Output written on paper_working_design.pdf (5 pages, 557661 bytes).
+ Output written on paper_working_design.pdf (5 pages, 243662 bytes).
PDF statistics:
- 102 PDF objects out of 1000 (max. 8388607)
- 31 named destinations out of 1000 (max. 500000)
- 16 words of extra memory for PDF output out of 10000 (max. 10000000)
+ 116 PDF objects out of 1000 (max. 8388607)
+ 40 named destinations out of 1000 (max. 500000)
+ 6 words of extra memory for PDF output out of 10000 (max. 10000000)

Template/latex/paper_working_design.pdf (binary, no diff preview)

Template/latex/paper_working_design.synctex.gz (binary, no diff preview)

Template/latex/paper_working_design.tex (diff collapsed, not shown)