
ultron: Commit



Commit MetaInfo

Revision: 34ae2927eba2d754bb53a93158e23a320eca49ca (tree)
Time: 2018-06-10 17:11:31
Author: unknown <shupeluter@hotm...>
Committer: unknown

Log Message

Added a method that returns the data as numpy arrays

Change Summary

  • delete: src/main/Python/DataGenerator.py
  • modified: src/main/Python/DataReader.py (diff)
  • delete: src/main/Python/Lern.py
  • delete: src/main/Python/TestFileUtil.py
  • delete: src/main/Python/sample1.py
  • delete: src/test/Python/HelloChainar.py

Modification

--- a/src/main/Python/DataGenerator.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from Data import Data
-from typing import List
-"""
-Module that converts data read from the training data files into the training data format.
-
-Assumes training data x: array[total data files][day][price data]
-Training data y: array[total data files][correct label]
-
-Data is generated in this shape.
-
-"""
-
-__whole_data: List[Data]
--- a/src/main/Python/DataReader.py
+++ b/src/main/Python/DataReader.py
@@ -18,15 +18,24 @@ class DataReader:
         self.DATA_SOURCE = confdata['dataPath']

     def get_learning_data(self):
+        '''
+
+        :return: training data, training labels, test data, test labels
+        '''
+
         target_data = self.parse_data_files()
         datasets: List(numpy.ndarray) = []
-        labels: List(str) =[]
+        labels: List(str) = []

         for cdata in target_data:
             datasets.append(cdata.get_array_data())
             labels.append(cdata.getLable())

-        return numpy.array(datasets), numpy.array(labels)
+        center = len(datasets)//2
+        offset = len(datasets) % 2
+        center = center + offset
+
+        return numpy.array(datasets[:center]), numpy.array(labels[:center]), numpy.array(datasets[center:]), numpy.array(labels[center:])

     def __check_data(self, data: Data):

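The change above replaces the old two-value return (all data, all labels) with a four-value return that splits the parsed files down the middle into a training half and a test half, with the training half taking the extra element when the file count is odd. A minimal usage sketch of the new interface; assuming DataReader can be constructed with no arguments, which is not shown in this diff:

    from DataReader import DataReader

    # Hypothetical caller of the new four-value return added in this commit.
    reader = DataReader()
    train_x, train_y, test_x, test_y = reader.get_learning_data()

    # The first pair covers the first half of the parsed files (plus one
    # extra file when the count is odd); the second pair covers the rest.
    print(train_x.shape, train_y.shape)
    print(test_x.shape, test_y.shape)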
--- a/src/main/Python/Lern.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from chainer import Function, gradient_check, report, training, utils, Variable
-from chainer import datasets, iterators, optimizers, serializers
-from chainer import Link, Chain, ChainList
-import chainer.functions as F
-import chainer.links as L
-from chainer.training import extensions
-import math
-from DataReader import DataReader
-from DatasetGenerator import DatasetGenerator
-
-from Data import Data
-
-class MyChain(Chain):
-    def __init__(self):
-        super(MyChain,self).__init__(
-            l1=L.Linear(500,100),
-            l2=L.Linear(100,100),
-            l3=L.Linear(100,10),
-        )
-
-    def __call__(self,x,t):
-        return F.softmax_cross_entropy(self.fwd(x),t)
-
-    def fwd(self,x):
-        h1=F.relu(self.l1(x))
-        h2=F.relu(self.l2(h1))
-        return self.l3(h2)
-
-
-class MyClassifer(Chain):
-    def __init__(self,predictor):
-        super(MyClassifer,self).__init__()
-        with self.init_scope():
-            self.predictor = predictor
-    def __call__(self,x,t):
-        y = self.predictor(x)
-        loss = F.softmax_cross_entropy(y,t)
-        accuracy = F.accuracy(y,t)
-        report({'loss': loss, 'accuracy': accuracy}, self)
-        return loss;
-
-def main():
-# try:
-    # prepare the model
-    model = MyChain()
-
-    # prepare the optimizer
-    optimizer = optimizers.Adam()
-    optimizer.setup(model)
-
-    # prepare the data
-    train,test = dataPreparation()
-
-    updater = training.StandardUpdater(train,optimizer)
-    trainer = training.Trainer(updater,(10,'epoch'))
-    trainer.extend(extensions.progress_bar)
-    trainer.run()
-
-def dataPreparation():
-    return "hoge"
-
-def dataPreparation_back():
-    # prepare data
-    train_data = []
-    train_label = []
-    # generate source data
-    reader = DataReader() # type DataReader
-    dgene = DatasetGenerator() # type DataSetGenerator
-    dataist = []
-    dataList = reader.createLearningData()
-    train_data, train_label = dgene.generateDataset(dataList)
-    dust, batchsize = math.modf(len(dataList) / 2)
-    batchsize = int(batchsize)
-    train = datasets.tuple_dataset.TupleDataset(train_data[0:batchsize], train_label[0:batchsize])
-    test = datasets.tuple_dataset.TupleDataset(train_data[batchsize:], train_label[batchsize:])
-    return train,test
-
-main()
\ No newline at end of file
--- a/src/main/Python/TestFileUtil.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import unittest
-
-class TestFileUtil (unittest.TestCase):
-    """test class of hoge"""
-
-    def test_1(self):
-        print(1)
-    def test_2(self):
-        print(2)
-
-if __name__ == "__main__":
-    unittest.main()
-
-
--- a/src/main/Python/sample1.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import os
-import yaml
-
-DATA_SOURCE = ''
-
-with open('config\\toolconf.yml' , 'r') as yml:
-    data = yaml.load(yml)
-    # load configuration values
-    DATA_SOURCE = data['dataPath']
-
-def find_all_files(directory):
-    for root, dirs, files in os.walk(directory):
-        yield root
-        for file in files:
-            yield os.path.join(root,file)
-
-for file in find_all_files(DATA_SOURCE):
-    print(file)
\ No newline at end of file
--- a/src/test/Python/HelloChainar.py
+++ /dev/null
@@ -1 +0,0 @@
-from chainer.datasets import mnist