# file.py
  1. from django.db import models
  2. import os, errno
  3. import csv
  4. from api.utils import *
  5. import json
  6. from random import randint
  7. import logging
  8. from django.http import FileResponse
  9. from Crypto.Cipher import ARC4
  10. from Crypto.Protocol.KDF import PBKDF2
  11. from Crypto.Hash import SHA512
  12. from django.contrib.auth.hashers import make_password
  13. from io import TextIOWrapper, BytesIO
  14. from ast import literal_eval
  15. from typing import Any
  16. import zipfile
  17. types = [
  18. ('csv', 'csv'),
  19. ]
  20. usages = [
  21. ('input', 'input'),
  22. ('show', 'show'),
  23. ('result', 'result'),
  24. ('output', 'output'),
  25. ]
  26. contents = [
  27. ('node', 'node'),
  28. ('edge', 'edge'),
  29. ]
  30. logger = logging.getLogger("file-model")
  31. # 盐值
  32. salt = "vrServer"
  33. # 加密
  34. def rc4_encrypt(key: bytes, data: bytes) -> bytes:
  35. cipher = ARC4.new(key)
  36. cipher.encrypt(b'\x00' * 1024) # 丢弃前1024字节密钥流
  37. return cipher.encrypt(data)
  38. # 解密
  39. def rc4_decrypt(key: bytes, data: bytes) -> bytes:
  40. return rc4_encrypt(key, data)
  41. # 由密码生成密钥
  42. def derive_key(password: str, salt: bytes, iterations: int) -> bytes:
  43. return PBKDF2(
  44. password.encode('utf-8'),
  45. salt,
  46. dkLen=32, # 生成256位密钥
  47. count=iterations,
  48. hmac_hash_module=SHA512
  49. )
  50. # 安全解析json
  51. def safe_json_parse(json_str: str, default: Any = None) -> Any:
  52. # 预处理空字符串
  53. stripped_str = json_str.strip()
  54. if not stripped_str:
  55. return default if default is not None else []
  56. try:
  57. data = json.loads(stripped_str)
  58. except json.JSONDecodeError:
  59. return default if default is not None else []
  60. # 递归检查嵌套空列表
  61. def is_empty_nested_list(obj):
  62. if isinstance(obj, list):
  63. return all(is_empty_nested_list(item) for item in obj)
  64. return False
  65. # 如果是空列表或嵌套空列表,返回默认值
  66. if data == [] or is_empty_nested_list(data):
  67. return default if default is not None else []
  68. return data
  69. class FileManager(models.Manager):
  70. def getHistory(self, user):
  71. # try:
  72. files = user.own_files.filter(usage="input").all()
  73. history = []
  74. for file in files:
  75. fileId = file.id
  76. if file.content == "node" and not file.own_missions_node.exists():
  77. # 输入的节点文件没有对应的任务,应该删除
  78. file.delete()
  79. continue
  80. if file.content == "edge" and not file.own_missions_edge.exists():
  81. # 输入的边文件没有对应的任务,应该删除
  82. continue
  83. directory = os.path.join(BASE_FILE_PATH, str(user.id))
  84. path = os.path.join(directory, str(fileId))
  85. # 首先检查文件是否被归档压缩
  86. archivePath = os.path.join(BASE_FILE_PATH, "archive.zip")
  87. with zipfile.ZipFile(archivePath, 'a') as zipf:
  88. fileArchPath = os.path.normpath(f"{file.user.id}/{file.id}").replace("\\", "/")
  89. if fileArchPath in zipf.namelist():
  90. # 重复添加压缩,则跳过压缩步骤直接将原始文件删除即可
  91. size = zipf.getinfo(fileArchPath).file_size
  92. else:
  93. # 文件未被压缩,查找是否真的有文件存在,否则清理掉没有实际文件的数据库记录
  94. try:
  95. size = os.path.getsize(path)
  96. except FileNotFoundError:
  97. print("未找到对应文件,现将记录删除", fileId, file.name)
  98. self.get(id=fileId).delete()
  99. continue
  100. except Exception as error:
  101. print("读取历史记录时出现未知错误")
  102. return FAILED
  103. if size >= 1024 * 1024:
  104. size = size / (1024 * 1024)
  105. size = f"{size:.2f} MB"
  106. else:
  107. size = size / 1024
  108. size = f"{size:.2f} KB"
  109. if file.content == 'node':
  110. missions = file.own_missions_node.all()
  111. fileInfo = {
  112. '节点总数': file.own_file_info.nodes,
  113. 'S节点数': file.own_file_info.sNodes,
  114. 'D节点数': file.own_file_info.dNodes,
  115. 'I节点数': file.own_file_info.iNodes,
  116. }
  117. elif file.content == 'edge':
  118. missions = file.own_missions_edge.all()
  119. fileInfo = {
  120. '边总数': file.own_file_info.edges,
  121. }
  122. else:
  123. logger.error(f"获取历史文件出错,文件格式错误 content: {file.content}")
  124. return FAILED
  125. history.append({
  126. 'id': file.id,
  127. 'name': file.name,
  128. 'uploadTime': file.update_time,
  129. 'size': size,
  130. 'encrypted': file.encrypted,
  131. 'content': file.content,
  132. 'missions': [{'id': mission.id, 'name': mission.name} for mission in missions],
  133. 'fileInfo': fileInfo,
  134. })
  135. return history
  136. # except Exception as error:
  137. # print("Failed to get upload history", error)
  138. # return FAILED
  139. # Create your models here.
  140. class File(models.Model):
  141. name = models.CharField(default="untitled", max_length=64)
  142. type = models.CharField(choices=types, max_length=5)
  143. usage = models.CharField(choices=usages, max_length=20)
  144. create_time = models.DateTimeField(auto_now_add=True)
  145. update_time = models.DateTimeField(auto_now=True)
  146. content = models.CharField(choices=contents, max_length=10)
  147. encrypted = models.BooleanField(default=False)
  148. archived = models.BooleanField(default=False)
  149. key = models.CharField(blank=True, null=True, max_length=128)
  150. associate = models.ForeignKey('self', on_delete=models.CASCADE, blank=True, null=True)
  151. user = models.ForeignKey(to="api.User", on_delete=models.CASCADE, related_name='own_files')
  152. objects = FileManager()
  153. def encrypt(self, password):
  154. # 该密码仅用于验证
  155. verifyPassword = make_password(
  156. password,
  157. salt='vrviewer',
  158. hasher='pbkdf2_sha256'
  159. )
  160. self.key = verifyPassword
  161. if self.encrypted:
  162. logger.error(f"文件{self.id}已经过加密,无法再次加密")
  163. return False
  164. else:
  165. # 仍使用用户输入密码加密
  166. key = derive_key(
  167. password=password,
  168. salt=salt,
  169. iterations=4,
  170. )
  171. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  172. with open(path, 'rb') as f:
  173. original_data = f.read()
  174. with open(path, 'wb') as f:
  175. f.write(rc4_encrypt(key, original_data))
  176. self.encrypted = True
  177. self.save()
  178. return True
  179. def decrypted(self, password):
  180. # 仅用于验证
  181. verifyPassword = make_password(
  182. password,
  183. salt='vrviewer',
  184. hasher='pbkdf2_sha256'
  185. )
  186. if not verifyPassword == self.key:
  187. logger.error(f"文件{self.id}解密密钥错误")
  188. return False
  189. if not self.encrypted:
  190. logger.error(f"文件{self.id}未经过加密,无法进行解密")
  191. return False
  192. else:
  193. key = derive_key(
  194. password=password,
  195. salt=salt,
  196. iterations=4,
  197. )
  198. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  199. with open(path, 'rb') as f:
  200. original_data = f.read()
  201. with open(path, 'wb') as f:
  202. f.write(rc4_decrypt(key, original_data))
  203. self.encrypted = False
  204. self.save()
  205. return True
  206. def decryptToData(self, password):
  207. # 仅用于验证
  208. verifyPassword = make_password(
  209. password,
  210. salt='vrviewer',
  211. hasher='pbkdf2_sha256'
  212. )
  213. if not verifyPassword == self.key:
  214. logger.error(f"文件{self.id}解密密钥错误")
  215. return False
  216. if not self.encrypted:
  217. logger.error(f"文件{self.id}未经过加密,无法进行解密")
  218. return False
  219. else:
  220. key = derive_key(
  221. password=password,
  222. salt=salt,
  223. iterations=4,
  224. )
  225. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  226. with open(path, 'rb') as f:
  227. original_data = f.read()
  228. return TextIOWrapper( BytesIO(rc4_decrypt(key, original_data)), encoding='utf-8', newline='')
  229. def verify(self, password):
  230. verifyPassword = make_password(
  231. password,
  232. salt='vrviewer',
  233. hasher='pbkdf2_sha256'
  234. )
  235. if not self.encrypted:
  236. logger.error(f"文件{self.id}未经过加密,无法进行解密验证")
  237. return False
  238. if not verifyPassword == self.key:
  239. logger.error(f"文件{self.id}验证密钥错误")
  240. return False
  241. return True
  242. def archive(self):
  243. if not self.archived:
  244. filePath = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  245. archivePath = os.path.join(BASE_FILE_PATH, "archive.zip")
  246. try:
  247. with zipfile.ZipFile(archivePath, 'a') as zipf:
  248. fileArchPath = os.path.normpath(f"{self.user.id}/{self.id}").replace("\\", "/")
  249. if fileArchPath in zipf.namelist():
  250. # 重复添加压缩,则跳过压缩步骤直接将原始文件删除即可
  251. self.archived = True
  252. self.save()
  253. os.remove(filePath)
  254. else:
  255. # 使用用户id和文件id组合成压缩文件中索引
  256. zipf.write(filePath, fileArchPath)
  257. self.archived = True
  258. self.save()
  259. os.remove(filePath)
  260. except Exception as error:
  261. logger.error(f"压缩文件{self.id} {self.name}失败: {error}")
  262. else:
  263. pass
  264. def unzip(self):
  265. if not self.archived:
  266. self.error(f"解压文件{self.id} {self.name}失败,文件并未压缩")
  267. return
  268. else:
  269. filePath = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  270. archivePath = os.path.join(BASE_FILE_PATH, "archive.zip")
  271. try:
  272. with zipfile.ZipFile(archivePath, 'r') as zipf:
  273. fileArchPath = os.path.normpath(f"{self.user.id}/{self.id}").replace("\\", "/")
  274. if fileArchPath in zipf.namelist():
  275. with zipf.open(fileArchPath) as zipd:
  276. content = zipd.read()
  277. with open(filePath, 'wb') as f:
  278. f.write(content)
  279. # 恢复压缩标记
  280. self.archived = False
  281. self.save()
  282. else:
  283. raise ValueError(f"该文件不存在压缩文件中")
  284. except Exception as error:
  285. logger.error(f"解压缩文件{self.id} {self.name}失败:{error}")
  286. def download(self):
  287. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  288. archivePath = os.path.join(BASE_FILE_PATH, "archive.zip")
  289. # 需要检查文件是否被归档,下载归档文件并不需要解压
  290. if self.archived:
  291. with zipfile.ZipFile(archivePath, 'r') as zipf:
  292. fileArchPath = os.path.normpath(f"{self.user.id}/{self.id}").replace("\\", "/")
  293. if fileArchPath in zipf.namelist():
  294. # 在压缩文件中找到文件,将其数据读出用于下载
  295. with zipf.open(fileArchPath) as zfile:
  296. logger.info("从压缩包中下载")
  297. content = zfile.read()
  298. response = FileResponse(BytesIO(content))
  299. response['Content-Disposition'] = f'attachment; filename="{self.name}"'
  300. return FileResponse(open(path, 'rb'))
  301. else:
  302. logger.info(f"文件{self.id} {self.name}具有压缩标记,但未在压缩文件中找到")
  303. raise ValueError(f"文件{self.id} {self.name}具有压缩标记,但未在压缩文件中找到")
  304. if not os.path.exists(path):
  305. return False
  306. # 加密后文件也不允许下载
  307. if self.encrypted:
  308. return False
  309. else:
  310. response = FileResponse(open(path), 'rb')
  311. response['Content-Disposition'] = f'attachment; filename="{self.name}"'
  312. return FileResponse(open(path, 'rb'))
  313. def saveWithInfo(self):
  314. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  315. if self.content in ['node', 'nodes']:
  316. sCount = dCount = iCount = 0
  317. nodeFile = csv.reader(open(path, 'r'))
  318. for line in nodeFile:
  319. if line[1] == 'S':
  320. sCount += 1
  321. if line[1] == 'D':
  322. dCount += 1
  323. if line[1] == 'I':
  324. iCount += 1
  325. fileInfo = FileInfo()
  326. fileInfo.file = self
  327. fileInfo.nodes = sCount + dCount + iCount
  328. fileInfo.sNodes = sCount
  329. fileInfo.dNodes = dCount
  330. fileInfo.iNodes = iCount
  331. fileInfo.save()
  332. if self.content in ['edge', 'edges']:
  333. edges = 0
  334. edgeFile = csv.reader(open(path, 'r'))
  335. for line in edgeFile:
  336. if line:
  337. edges += 1
  338. fileInfo = FileInfo()
  339. fileInfo.file = self
  340. fileInfo.edges = edges
  341. fileInfo.save()
  342. self.save()
  343. def generate(self, data):
  344. # 从json结果生成文件
  345. path = os.path.join(BASE_FILE_PATH, str(self.user.id))
  346. if os.path.exists(os.path.join(path, str(self.id))):
  347. self.delete()
  348. return FILE_ALREADY_EXIST
  349. else:
  350. try:
  351. os.mkdir(path)
  352. except Exception as error:
  353. if not error.args[0] == 17:
  354. print(error)
  355. return FILE_FAILED_CREATE_DIR
  356. if self.content == 'node':
  357. nodes = []
  358. file = open(os.path.join(path, str(self.id)), 'w', newline='')
  359. csvFile = csv.writer(file)
  360. for line in data:
  361. if not str(line[0]).isdigit():
  362. logger.error("check file illegal failed node id wrong")
  363. return FAILED
  364. if not line[1] in ['S', 'D', 'I']:
  365. logger.error("check file illegal failed node type wrong")
  366. return FAILED
  367. if line[0] not in nodes:
  368. nodes.append(line[0])
  369. else:
  370. logger.error("check file illegal failed node dudplicate id")
  371. return FAILED
  372. # 除了节点编号和节点类型外,其余参数全部放在line的后续位置,以字符串json的格式保存
  373. csvFile.writerow(line)
  374. file.close()
  375. return OK
  376. if self.content == 'edge':
  377. edges = []
  378. file = open(os.path.join(path, str(self.id)), 'w', newline='')
  379. csvFile = csv.writer(file)
  380. for line in data:
  381. if not str(line[0]).isdigit() or not str(line[1]).isdigit():
  382. logger.error("check file illegal failed edge len =2")
  383. return FAILED
  384. # 注意默认将边视为无向边
  385. # 检查重复
  386. if [line[0], line[1]] not in edges and [line[1], line[0]] not in edges:
  387. edges.append([line[0], line[1]])
  388. # 后续参数放在line的后续位置
  389. csvFile.writerow(line)
  390. file.close()
  391. return OK
  392. return UNKNOWN_CONTENT
  393. def storage(self, file):
  394. # 将file数据保存成文件,不对file做任何处理
  395. try:
  396. path = os.path.join(BASE_FILE_PATH, str(self.user.id))
  397. if os.path.exists(os.path.join(path, str(self.id))):
  398. self.delete()
  399. return FILE_ALREADY_EXIST
  400. else:
  401. try:
  402. os.mkdir(path)
  403. except Exception as error:
  404. if not error.args[0] == 17:
  405. print(error)
  406. return FILE_FAILED_CREATE_DIR
  407. file_path = os.path.join(path, str(self.id))
  408. f = open(file_path, 'wb')
  409. for bite in file:
  410. f.write(bite)
  411. f.close()
  412. return OK
  413. except Exception as error:
  414. logger.error(error)
  415. return FAILED
  416. # 检查文件是否合法
  417. def checkIllegal(self):
  418. # 检查文件前需要检查是否被压缩,如被压缩则需要解压
  419. if self.archived:
  420. self.unzip()
  421. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  422. path2 = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.associate.id))
  423. if self.content == 'node':
  424. file = csv.reader(open(path, 'r'))
  425. # 针对csv文件的检测
  426. if self.type == 'csv':
  427. nodes = []
  428. for line in file:
  429. if not len(line) >= 2:
  430. logger.error("check file illegal failed node len >= 2")
  431. return False
  432. if not line[0].isdigit():
  433. logger.error("check file illegal failed node id wrong")
  434. return False
  435. if not line[1] in ['S', 'D', 'I']:
  436. logger.error(f"check file illegal failed node type wrong:{line}")
  437. return False
  438. if line[0] not in nodes:
  439. nodes.append(line[0])
  440. else:
  441. logger.error("check file illegal failed node dudplicate id")
  442. return False
  443. return True
  444. if self.content == 'edge':
  445. edgeFile = csv.reader(open(path, 'r'))
  446. nodeFile = csv.reader(open(path2, 'r'))
  447. # 针对csv文件的检测
  448. if self.type == 'csv':
  449. nodes = []
  450. edges = []
  451. for line in nodeFile:
  452. if not len(line) >= 2:
  453. logger.error("check file illegal failed node len >= 2")
  454. return False
  455. if not line[0].isdigit():
  456. logger.error("check file illegal failed node id wrong")
  457. return False
  458. nodes.append(line[0])
  459. for line in edgeFile:
  460. if not len(line) == 2:
  461. logger.error("check file illegal failed edge len =2")
  462. return False
  463. if line[0] not in nodes or line[1] not in nodes:
  464. logger.error("check file illegal failed edge id not exist")
  465. return False
  466. if [line[0], line[1]] not in edges and [line[1], line[0]] not in edges:
  467. edges.append([line[0], line[1]])
  468. else:
  469. # 将图视为无向图,同一条边的正反算作重复
  470. # 直接去除重复边
  471. logger.error("check file illegal failed edge duplicate edge")
  472. return False
  473. return True
  474. def toJson(self, request=None):
  475. # 需要检查文件是否被归档压缩,如有则需要先解压
  476. if self.archived:
  477. self.unzip()
  478. # 检查是否为加密文件,只有当文件usage为input时才应该存在加密属性
  479. if self.usage == 'input' and self.encrypted:
  480. # 如果被加密则需要从request中获取解密密钥
  481. key = request.session.get('encrypt-keys', {}).get(str(self.id), '')
  482. if key:
  483. file = csv.reader(self.decryptToData(key))
  484. else:
  485. raise KeyError(f"解密文件{self.id}所需密钥不存在")
  486. else:
  487. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  488. file = csv.reader(open(path, 'r'))
  489. if self.content == 'node':
  490. if self.type == 'csv':
  491. nodes = []
  492. for line in file:
  493. # 如果有额外数据,则放入第三个字段中
  494. node = {'id': line[0], 'type': line[1], 'meta': []}
  495. for el in range(2, len(line)):
  496. # 对于meta字段,写入时数据为不带双引号,以冒号分割的字串
  497. # 或者是直接正常的json字段,应尝试两种方式解析
  498. try:
  499. metaJson = safe_json_parse(line[el].replace('\'', '\"'))
  500. # 检测是否嵌套过多
  501. while metaJson:
  502. if type(metaJson[0]) == list:
  503. metaJson = metaJson[0]
  504. else:
  505. break
  506. node['meta'] = metaJson
  507. except Exception as error:
  508. logger.info(f"尝试以json格式解析文件meta内容{line[el]}失败,尝试以非标准格式解析{error}")
  509. # 尝试以冒号分隔格式解析
  510. elList = el.split(':')
  511. if len(elList) != 2:
  512. logger.info(f"尝试以非标准格式解析文件meta内容{el}失败,放弃解析")
  513. continue
  514. else:
  515. node['meta'].append({
  516. elList[0]: elList[1]
  517. })
  518. # # 测试用,添加optimize
  519. # el = '{"optimize": "old"}'
  520. # node['meta'].append(json.loads(el))
  521. # # 测试用,添加group
  522. # el = '{"group": "' + str(randint(1,5)) + '"}'
  523. # node['meta'].append(json.loads(el))
  524. nodes.append(node)
  525. return nodes
  526. if self.content == 'edge':
  527. if self.type == 'csv':
  528. edges = []
  529. for line in file:
  530. # 如果有额外数据,则放入第三个字段中
  531. edge = {'from': line[0], 'to': line[1], 'meta': []}
  532. for el in range(2, len(line)):
  533. try:
  534. metaJson = safe_json_parse(line[el].replace('\'', '\"'))
  535. # 检测是否嵌套过多
  536. while metaJson:
  537. if type(metaJson[0]) == list:
  538. metaJson = metaJson[0]
  539. else:
  540. break
  541. edge['meta'] = metaJson
  542. except Exception as error:
  543. logger.info(f"尝试以json格式解析文件meta内容{line[el]}失败,尝试以非标准格式解析{error}")
  544. # 尝试以冒号分隔格式解析
  545. elList = el.split(':')
  546. if len(elList) != 2:
  547. logger.info(f"尝试以非标准格式解析文件meta内容{el}失败,放弃解析")
  548. continue
  549. else:
  550. edge['meta'].append({
  551. elList[0]: elList[1]
  552. })
  553. # # 测试用,添加optimize
  554. # el = '{"optimize": "old"}'
  555. # edge['meta'].append(json.loads(el))
  556. edges.append(edge)
  557. # logger.info(edges)
  558. return edges
  559. def deleteStorage(self):
  560. path = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.id))
  561. if self.associate:
  562. path2 = os.path.join(os.path.join(BASE_FILE_PATH, str(self.user.id)), str(self.associate.id))
  563. else:
  564. path2 = ""
  565. failedFlag = False
  566. for p in [path, path2]:
  567. if os.path.exists(p):
  568. try:
  569. os.remove(p)
  570. except Exception as error:
  571. # 可能出现失败的原因是文件被占用
  572. logger.error(f"删除文件{self.id} {self.name}失败:{error}")
  573. failedFlag = True
  574. # 无论文件删除是否成功,都要把记录删除,多余的文件可以再后续清理时删除
  575. if self.associate:
  576. self.associate.delete()
  577. if self:
  578. self.delete()
  579. if failedFlag:
  580. return FAILED
  581. return OK
  582. class Meta:
  583. app_label = 'api'
  584. class FileInfo(models.Model):
  585. file = models.OneToOneField(File, on_delete=models.CASCADE, related_name='own_file_info')
  586. nodes = models.IntegerField(default=0)
  587. sNodes = models.IntegerField(default=0)
  588. dNodes = models.IntegerField(default=0)
  589. iNodes = models.IntegerField(default=0)
  590. edges = models.IntegerField(default=0)
  591. # 待添加集中度等边的信息
  592. class Meta:
  593. app_label = 'api'