# File models: uploaded/generated CSV files, their statistics, and history queries.
  1. from django.db import models
  2. import os, errno
  3. import csv
  4. from api.utils import *
  5. import json
  6. from random import randint
  7. import logging
  8. types = [
  9. ('csv', 'csv'),
  10. ]
  11. usages = [
  12. ('input', 'input'),
  13. ('show', 'show'),
  14. ('result', 'result'),
  15. ('output', 'output'),
  16. ]
  17. contents = [
  18. ('node', 'node'),
  19. ('edge', 'edge'),
  20. ]
  21. logger = logging.getLogger("file-model")
  22. class FileManager(models.Manager):
  23. def getHistory(self, user):
  24. # try:
  25. files = user.own_files.filter(usage="input").all()
  26. history = []
  27. for file in files:
  28. fileId = file.id
  29. directory = os.path.join(BASE_FILE_PATH, str(user.id))
  30. path = os.path.join(directory, str(fileId))
  31. try:
  32. size = os.path.getsize(path)
  33. except FileNotFoundError:
  34. print("未找到对应文件,现将记录删除", fileId, file.name)
  35. self.get(id=fileId).delete()
  36. continue
  37. except Exception as error:
  38. print("读取历史记录时出现未知错误")
  39. return FAILED
  40. if size >= 1024 * 1024:
  41. size = size / (1024 * 1024)
  42. size = f"{size:.2f} MB"
  43. else:
  44. size = size / 1024
  45. size = f"{size:.2f} KB"
  46. if file.content == 'node':
  47. missions = file.own_missions_node.all()
  48. fileInfo = {
  49. '节点总数': file.own_file_info.nodes,
  50. 'S节点数': file.own_file_info.sNodes,
  51. 'D节点数': file.own_file_info.dNodes,
  52. 'I节点数': file.own_file_info.iNodes,
  53. }
  54. elif file.content == 'edge':
  55. missions = file.own_missions_edge.all()
  56. fileInfo = {
  57. '边总数': file.own_file_info.edges,
  58. }
  59. else:
  60. logger.error(f"获取历史文件出错,文件格式错误 content: {file.content}")
  61. return FAILED
  62. history.append({
  63. 'id': file.id,
  64. 'name': file.name,
  65. 'uploadTime': file.update_time,
  66. 'size': size,
  67. 'content': file.content,
  68. 'missions': [{'id': mission.id, 'name': mission.name} for mission in missions],
  69. 'fileInfo': fileInfo,
  70. })
  71. return history
  72. # except Exception as error:
  73. # print("Failed to get upload history", error)
  74. # return FAILED
  75. # Create your models here.
  76. class File(models.Model):
  77. name = models.CharField(default="untitled", max_length=64)
  78. type = models.CharField(choices=types, max_length=5)
  79. usage = models.CharField(choices=usages, max_length=20)
  80. create_time = models.DateTimeField(auto_now_add=True)
  81. update_time = models.DateTimeField(auto_now=True)
  82. content = models.CharField(choices=contents, max_length=10)
  83. associate = models.ForeignKey('self', on_delete=models.CASCADE, blank=True, null=True)
  84. user = models.ForeignKey(to="api.User", on_delete=models.CASCADE, related_name='own_files')
  85. objects = FileManager()
  86. def saveWithInfo(self):
  87. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  88. if self.content in ['node', 'nodes']:
  89. sCount = dCount = iCount = 0
  90. nodeFile = csv.reader(open(path, 'r'))
  91. for line in nodeFile:
  92. if line[1] == 'S':
  93. sCount += 1
  94. if line[1] == 'D':
  95. dCount += 1
  96. if line[1] == 'I':
  97. iCount += 1
  98. fileInfo = FileInfo()
  99. fileInfo.file = self
  100. fileInfo.nodes = sCount + dCount + iCount
  101. fileInfo.sNodes = sCount
  102. fileInfo.dNodes = dCount
  103. fileInfo.iNodes = iCount
  104. fileInfo.save()
  105. if self.content in ['edge', 'edges']:
  106. edges = 0
  107. edgeFile = csv.reader(open(path, 'r'))
  108. for line in edgeFile:
  109. if line:
  110. edges += 1
  111. fileInfo = FileInfo()
  112. fileInfo.file = self
  113. fileInfo.edges = edges
  114. fileInfo.save()
  115. self.save()
  116. def generate(self, data):
  117. # 从json结果生成文件
  118. path = os.path.join(BASE_FILE_PATH, str(self.user.id))
  119. if os.path.exists(os.path.join(path, str(self.id))):
  120. self.delete()
  121. return FILE_ALREADY_EXIST
  122. else:
  123. try:
  124. os.mkdir(path)
  125. except Exception as error:
  126. if not error.args[0] == 17:
  127. print(error)
  128. return FILE_FAILED_CREATE_DIR
  129. if self.content == 'node':
  130. nodes = []
  131. file = open(os.path.join(path, str(self.id)), 'w', newline='')
  132. csvFile = csv.writer(file)
  133. for line in data:
  134. if not str(line[0]).isdigit():
  135. logger.error("check file illegal failed node id wrong")
  136. return FAILED
  137. if not line[1] in ['S', 'D', 'I']:
  138. logger.error("check file illegal failed node type wrong")
  139. return FAILED
  140. if line[0] not in nodes:
  141. nodes.append(line[0])
  142. else:
  143. logger.error("check file illegal failed node dudplicate id")
  144. return FAILED
  145. # 除了节点编号和节点类型外,其余参数全部放在line的后续位置,以字符串json的格式保存
  146. csvFile.writerow(line)
  147. file.close()
  148. return OK
  149. if self.content == 'edge':
  150. edges = []
  151. file = open(os.path.join(path, str(self.id)), 'w', newline='')
  152. csvFile = csv.writer(file)
  153. for line in data:
  154. if not str(line[0]).isdigit() or not str(line[1]).isdigit():
  155. logger.error("check file illegal failed edge len =2")
  156. return FAILED
  157. # 注意默认将边视为无向边
  158. # 检查重复
  159. if [line[0], line[1]] not in edges and [line[1], line[0]] not in edges:
  160. edges.append([line[0], line[1]])
  161. # 后续参数放在line的后续位置
  162. csvFile.writerow(line)
  163. file.close()
  164. return OK
  165. return UNKNOWN_CONTENT
  166. def storage(self, file):
  167. try:
  168. path = os.path.join(BASE_FILE_PATH, str(self.user.id))
  169. if os.path.exists(os.path.join(path, str(self.id))):
  170. self.delete()
  171. return FILE_ALREADY_EXIST
  172. else:
  173. try:
  174. os.mkdir(path)
  175. except Exception as error:
  176. if not error.args[0] == 17:
  177. print(error)
  178. return FILE_FAILED_CREATE_DIR
  179. file_path = os.path.join(path, str(self.id))
  180. f = open(file_path, 'wb')
  181. for bite in file:
  182. f.write(bite)
  183. f.close()
  184. return OK
  185. except Exception as error:
  186. logger.error(error)
  187. return FAILED
  188. # 检查文件是否合法
  189. def checkIllegal(self):
  190. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  191. path2 = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.associate.id))
  192. if self.content == 'node':
  193. file = csv.reader(open(path, 'r'))
  194. # 针对csv文件的检测
  195. if self.type == 'csv':
  196. nodes = []
  197. for line in file:
  198. if not len(line) >= 2:
  199. logger.error("check file illegal failed node len >= 2")
  200. return False
  201. if not line[0].isdigit():
  202. logger.error("check file illegal failed node id wrong")
  203. return False
  204. if not line[1] in ['S', 'D', 'I']:
  205. logger.error("check file illegal failed node type wrong")
  206. return False
  207. if line[0] not in nodes:
  208. nodes.append(line[0])
  209. else:
  210. logger.error("check file illegal failed node dudplicate id")
  211. return False
  212. return True
  213. if self.content == 'edge':
  214. edgeFile = csv.reader(open(path, 'r'))
  215. nodeFile = csv.reader(open(path2, 'r'))
  216. # 针对csv文件的检测
  217. if self.type == 'csv':
  218. nodes = []
  219. edges = []
  220. for line in nodeFile:
  221. if not len(line) >= 2:
  222. logger.error("check file illegal failed node len >= 2")
  223. return False
  224. if not line[0].isdigit():
  225. logger.error("check file illegal failed node id wrong")
  226. return False
  227. nodes.append(line[0])
  228. for line in edgeFile:
  229. if not len(line) == 2:
  230. logger.error("check file illegal failed edge len =2")
  231. return False
  232. if line[0] not in nodes or line[1] not in nodes:
  233. logger.error("check file illegal failed edge id not exist")
  234. return False
  235. if [line[0], line[1]] not in edges and [line[1], line[0]] not in edges:
  236. edges.append([line[0], line[1]])
  237. else:
  238. # 将图视为无向图,同一条边的正反算作重复
  239. # 直接去除重复边
  240. logger.error("check file illegal failed edge duplicate edge")
  241. return False
  242. return True
  243. def toJson(self):
  244. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  245. file = csv.reader(open(path, 'r'))
  246. if self.content == 'node':
  247. if self.type == 'csv':
  248. nodes = []
  249. for line in file:
  250. # 如果有额外数据,则放入第三个字段中
  251. node = {'id': line[0], 'type': line[1], 'meta': []}
  252. for el in range(2, len(line)):
  253. logger.error(el)
  254. node['meta'].append(json.loads(el))
  255. # 测试用,添加optimize
  256. el = '{"optimize": "old"}'
  257. node['meta'].append(json.loads(el))
  258. # 测试用,添加group
  259. el = '{"group": "' + str(randint(1,5)) + '"}'
  260. node['meta'].append(json.loads(el))
  261. nodes.append(node)
  262. return nodes
  263. if self.content == 'edge':
  264. if self.type == 'csv':
  265. edges = []
  266. for line in file:
  267. # 如果有额外数据,则放入第三个字段中
  268. edge = {'from': line[0], 'to': line[1], 'meta': []}
  269. for el in range(2, len(line)):
  270. edge['meta'].append(json.loads(el))
  271. # 测试用,添加optimize
  272. el = '{"optimize": "old"}'
  273. edge['meta'].append(json.loads(el))
  274. edges.append(edge)
  275. return edges
  276. def deleteStorage(self):
  277. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  278. if self.associate:
  279. path2 = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.associate.id))
  280. else:
  281. path2 = ""
  282. failedFlag = False
  283. for p in [path, path2]:
  284. if os.path.exists(p):
  285. try:
  286. os.remove(p)
  287. except Exception as error:
  288. # 可能出现失败的原因是文件被占用
  289. logger.error(f"删除文件{self.id} {self.name}失败:{error}")
  290. failedFlag = True
  291. # 无论文件删除是否成功,都要把记录删除,多余的文件可以再后续清理时删除
  292. if self.associate:
  293. self.associate.delete()
  294. if self:
  295. self.delete()
  296. if failedFlag:
  297. return FAILED
  298. return OK
  299. class Meta:
  300. app_label = 'api'
class FileInfo(models.Model):
    """Per-file statistics, one-to-one with :class:`File` (see ``saveWithInfo``)."""
    file = models.OneToOneField(File, on_delete=models.CASCADE, related_name='own_file_info')
    # Node-file statistics: total node count and per-type (S/D/I) counts.
    nodes = models.IntegerField(default=0)
    sNodes = models.IntegerField(default=0)
    dNodes = models.IntegerField(default=0)
    iNodes = models.IntegerField(default=0)
    # Edge-file statistics: total edge count.
    edges = models.IntegerField(default=0)
    # TODO: add further edge metrics such as concentration/centrality.

    class Meta:
        app_label = 'api'