原始程序:
template_ftp_server_old.py:
import socket
import json
import struct
import os
import time
import pymysql.cursors

# --- Old FTP server: module-level setup ---
# Listening TCP socket for incoming uploads.
soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
HOST = '192.168.31.111'
PORT = 4101
soc.bind((HOST, PORT))

# Derive the slice id and per-user column names from the port number.
# For PORT 4101: slice_id = 41 - 20 = 21, ue_id = digit at index 3 = 1,
# giving columns user1_delay / user1_v / user1_loss in table slice_v2.
port_str = str(PORT)
slice_id = int(port_str[:2]) - 20
ueX_str = 'user'
ue_id = int(port_str[3])
ueX_delay = ueX_str + str(ue_id) + '_delay'  # delay column name
ueX_v = ueX_str + str(ue_id) + '_v'          # throughput column name
ueX_loss = ueX_str + str(ue_id) + '_loss'    # loss-rate column name

soc.listen(5)

# MySQL connection used to publish measured statistics.
# NOTE(review): credentials are hard-coded — fine for a lab demo, not for production.
cnx = pymysql.connect(host='localhost', port=3306, user='root', password='123456', db='qt', charset='utf8mb4', connect_timeout=20)
cursor = cnx.cursor()
# upload function
def uploading_file():
    """Receive one file from the connected client `conn` and save it.

    Protocol: 4-byte little-endian length, then a JSON head
    {'data_len': ..., 'file_name': ...}, then the raw file bytes.
    Uses module globals `conn` (set by the accept loop below).
    """
    ftp_dir = r'C:/Users/yinzhao/Desktop/ftp/ftpseverfile'  # save incoming files here
    if not os.path.isdir(ftp_dir):
        os.mkdir(ftp_dir)
    head_bytes_len = conn.recv(4)                     # struct-packed head length
    head_len = struct.unpack('i', head_bytes_len)[0]  # actual head length
    # Fetch the real head content (each send is acknowledged with "1").
    conn.send("1".encode('utf8'))
    head_bytes = conn.recv(head_len)
    conn.send("1".encode('utf8'))
    # Deserialize the head.
    head = json.loads(head_bytes)
    file_name = head['file_name']
    print(file_name)
    file_path = os.path.join(ftp_dir, file_name)
    data_len = head['data_len']
    received_size = 0
    data = b''
    # Old-program issue 1: max bytes per recv — the larger, the faster the transfer.
    max_size = 65536 * 4
    while received_size < data_len:
        # Old-program issue 2: this loop used to contain heavy stats /
        # DB-update code (removed), which wasted time per chunk.
        if data_len - received_size > max_size:
            size = max_size
        else:
            size = data_len - received_size
        receive_data = conn.recv(size)  # receive file content
        # Old-program issue 3: TCP already acknowledges data — the server
        # does not need to send manual application-level ACKs per chunk.
        data += receive_data
        receive_datalen = len(receive_data)
        received_size += receive_datalen
    # Old-program issue 4: the file is assembled in memory and written in
    # one go, in the same thread — this blocks the receive path.
    with open(file_path, 'wb') as fw:
        fw.write(data)


while True:
    encoding = 'utf-8'
    print('等待客户端连接...')
    conn, addr = soc.accept()
    print('客户端已连接:', addr)
    uploading_file()
    conn.close()
client_renew_old.py
# coding: UTF-8
import socket
import os
import struct
import json
import hashlib
import time# lient_soc.connect(('192.168.1.101', 8025))# 上传函数
def uploading_file(filepath,filename):client_soc = socket.socket()client_soc.connect(('192.168.31.111', 4101))try:encoding = 'utf-8'with open(filepath+filename, 'rb') as fr:#发送固定文件data = fr.read()head = {'data_len': len(data), 'file_name': filename} # 自定义头head_bytes = json.dumps(head).encode('utf8')head_bytes_len = struct.pack('i', len(head_bytes))client_soc.send(head_bytes_len)client_soc.recv(20).decode('utf8')client_soc.send(head_bytes)client_soc.recv(20).decode('utf8')print(str(int(len(data))/1024)+'KB')# client_soc.send(data)max_size = 65536*4#max_size = 1024# 计算需要分成几个数据包发送num_packets = len(data) // max_sizeif len(data) % max_size > 0:num_packets += 1# 循环分批发送数据for i in range(num_packets):# 将延时、速率传到txt文件中start = i * max_sizeend = start + max_size# if(i==num_packets-1):# end=len(data)packet = data[start:end]#从这里开始计时start = time.perf_counter()client_soc.sendall(packet)# client_soc.recv(1024).decode('utf8')#计算时间段# delay=float(time.perf_counter() - start)# s#v=round(len(packet)/1048576/delay,2)#MB/S# delay=int(delay*1000)# print("延时:",delay)# print("传输速率:",v)#client_soc.send(str(delay).encode('utf8'))# rev_size=client_soc.recv(1024).decode('utf8')# loss=(len(packet)-int(rev_size))/len(packet)# client_soc.send(str(v).encode('utf8'))# client_soc.recv(1024).decode('utf8')# client_soc.send(str(loss).encode('utf8'))# client_soc.recv(1024).decode('utf8')# print("丢包率:",loss)print("发送完毕")#uploading_file()except socket.error as e:print("发生错误:", str(e))finally:# 关闭连接client_soc.close()if __name__ == '__main__':filepath='C:/Users/yinzhao/Desktop/ftp/'filename='test.7z'i = 0while i<2 :encoding = "utf8"uploading_file(filepath,filename)i = i+1print(i)
目前有如下支持:
- 传输任何文件,若成功传输到服务器,不会损坏
- 传输速率:
max_size = 65536*4
max_size = 65536*8
该代码经过测试,有如下问题与特性:
4. 一次发送最大数据量,越大传输速率越快。之前每次发送1024,现在修改为65536*4
5. 在接收文件的循环中有大量统计 计算 操作数据库的代码,占用时间
6. 基于socket的tcp无需手动在服务端进行ack的操作,即若无需要,服务端可以不手动回发消息。原程序有大量手动进行确认的过程,占用大量时间
7. 完整接收文件后再进行组装,组装后统一写入,也可以改成接收一次包写入一次
8. 所有程序都在同一个线程里,可以将数据传输、统计、数据写入放在不同的线程里,提高程序执行速度
修改代码
import socket
import json
import struct
import os
import time
import pymysql.cursors
import datetime
import threading

# --- Improved FTP server: module-level setup ---
soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
HOST = '192.168.84.1'
PORT = 4101
soc.bind((HOST, PORT))

# Derive slice id and per-user column names from the port number
# (same scheme as the old server: 41 - 20 = 21, ue_id = 1).
port_str = str(PORT)
slice_id = int(port_str[:2]) - 20
ueX_str = 'user'
ue_id = int(port_str[3])
ueX_delay = ueX_str + str(ue_id) + '_delay'  # delay column name
ueX_v = ueX_str + str(ue_id) + '_v'          # throughput column name
ueX_loss = ueX_str + str(ue_id) + '_loss'    # loss-rate column name

soc.listen(5)

# MySQL connection used by the background stats-upload thread.
cnx = pymysql.connect(host='localhost', port=3306, user='root', password='123456', db='qt', charset='utf8mb4', connect_timeout=20)
cursor = cnx.cursor()def up_to_mysql(v,delay,loss):delay_float = float(delay)v_float = float(v)loss_float = float(loss)formatted_delay = "{:.2f}".format(delay_float)formatted_v = "{:.2f}".format(v_float)formatted_loss = "{:.2f}".format(loss_float)cursor.execute("UPDATE slice_v2 SET " + ueX_v + "= %s," + ueX_delay + "=%s, " + ueX_loss + " =%s WHERE slice_id = %s", [str(formatted_v), str(formatted_delay), str(formatted_loss), slice_id])cnx.commit()def write_file(file_path,data):with open(file_path, 'wb') as fw:fw.write(data)
# 上传函数
def uploading_file():
    """Receive one file on the global `conn`, measuring speed and delay.

    Same head protocol as the old server (4-byte length + JSON head + raw
    data), but: no per-chunk manual ACKs, stats are computed at most once
    per second and pushed to MySQL on a background thread, and the final
    file write also happens on a background thread.
    """
    ftp_dir = r'/home/lab/changeyaml/test'  # save incoming files here
    if not os.path.isdir(ftp_dir):
        os.mkdir(ftp_dir)
    head_bytes_len = conn.recv(4)                     # struct-packed head length
    head_len = struct.unpack('i', head_bytes_len)[0]  # actual head length
    # Fetch the real head content (head exchange keeps the two-ACK handshake).
    conn.send("1".encode('utf8'))
    head_bytes = conn.recv(head_len)
    conn.send("1".encode('utf8'))
    # Deserialize the head.
    head = json.loads(head_bytes)
    file_name = head['file_name']
    print(file_name)
    file_path = os.path.join(ftp_dir, file_name)
    data_len = head['data_len']
    received_size = 0
    data = b''
    max_size = 65536 * 8  # max bytes per recv; larger -> faster transfer
    time_begin = datetime.datetime.now()
    time_rev_begin = datetime.datetime.now()
    time_rev_end = datetime.datetime.now()
    datasize_rev_last = 0
    while received_size < data_len:
        if data_len - received_size > max_size:
            size = max_size
        else:
            size = data_len - received_size
        receive_data = conn.recv(size)  # receive file content
        data += receive_data
        receive_datalen = len(receive_data)
        received_size += receive_datalen
        time_rev_end = datetime.datetime.now()
        time_gap = time_rev_end - time_rev_begin
        # Once per second: compute speed/delay and hand them to a DB thread.
        if time_gap.total_seconds() >= 1:
            time_rev_begin = time_rev_end
            data_rev_1s_KB = (received_size - datasize_rev_last) / 1024  # KB in last ~1s
            # Estimated per-chunk delay: seconds per max_size-sized packet.
            delay = 1 / (data_rev_1s_KB * 1024 / max_size)
            delay_ms = delay * 1000
            # Fix: the original printed `delay` (seconds) with an 'ms' label.
            print("delay:" + str(delay_ms) + 'ms')
            datasize_rev_last = received_size
            # 8*KB/1024 converts KB/s to Mbit/s for the DB.
            up_mysql_thread = threading.Thread(target=up_to_mysql, args=(8 * data_rev_1s_KB / 1024, delay_ms, 0))
            up_mysql_thread.start()
    time_end = datetime.datetime.now()
    time_transport = time_end - time_begin
    time_transport_s = time_transport.total_seconds()
    print("耗时:" + str(time_transport_s))
    data_len_KB = data_len / 1024
    data_len_MB = data_len / (1024 * 1024)
    print("数据量:" + str(data_len_KB) + "KB")
    v_average = data_len_KB / time_transport_s  # average speed, KB/s
    print("平均速度:" + str(data_len_MB / time_transport_s) + 'MB/s')
    # Write the file on a worker thread so the accept loop is free sooner.
    write_file_thread = threading.Thread(target=write_file, args=(file_path, data))
    write_file_thread.start()


while True:
    encoding = 'utf-8'
    print('等待客户端连接...')
    conn, addr = soc.accept()
    print('客户端已连接:', addr)
    uploading_file()
    conn.close()
# coding: UTF-8
import socket
import os
import struct
import json
import hashlib
import time# 上传函数
def uploading_file(filepath,filename):client_soc = socket.socket()client_soc.connect(('192.168.1.21', 4101))try:encoding = 'utf-8'with open(filepath+filename, 'rb') as fr:#发送固定文件data = fr.read()head = {'data_len': len(data), 'file_name': filename} # 自定义头head_bytes = json.dumps(head).encode('utf8')head_bytes_len = struct.pack('i', len(head_bytes))client_soc.send(head_bytes_len)client_soc.recv(20).decode('utf8')client_soc.send(head_bytes)client_soc.recv(20).decode('utf8')print(str(int(len(data))/1024)+'KB')# client_soc.send(data)max_size = 65536*8#max_size = 1024# 计算需要分成几个数据包发送num_packets = len(data) // max_sizeif len(data) % max_size > 0:num_packets += 1# 循环分批发送数据for i in range(num_packets):start = i * max_sizeend = start + max_size# if(i==num_packets-1):# end=len(data)packet = data[start:end]#从这里开始计时start = time.perf_counter()client_soc.sendall(packet)print("发送完毕")#uploading_file()except socket.error as e:print("发生错误:", str(e))finally:# 关闭连接client_soc.close()if __name__ == '__main__':filepath='C:/Users/QD689/Desktop/ftp1.01/'filename='name.txt'i = 0while i<3 :encoding = "utf8"uploading_file(filepath,filename)uploading_file(filepath,filename)i = i+1print(i)
经过精简代码与多线程使用,以及合理配置每次发送数据量大小,ftp文件传输速率极大提高了,并且可以正确测算到速率与时延