Ravi21 commited on
Commit
7b17801
·
verified ·
1 Parent(s): ca98e8d

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -119
app.py DELETED
@@ -1,119 +0,0 @@
1
- import time
2
- from options.test_options import TestOptions
3
- from data.data_loader_test import CreateDataLoader
4
- from models.networks import ResUnetGenerator, load_checkpoint
5
- from models.afwm import AFWM
6
- import torch.nn as nn
7
- import os
8
- import numpy as np
9
- import torch
10
- import cv2
11
- import torch.nn.functional as F
12
-
13
- import io
14
- from PIL import Image
15
- from flask import Flask, jsonify, request
16
- from tqdm.auto import tqdm
17
-
18
app = Flask(__name__)

opt = TestOptions().parse()

# Build the person/cloth pairing file consumed by the data loader: every
# cloth image under dataset/test_clothes is paired with the single uploaded
# person image, which is always saved as 'input.png'.
cloth_names = os.listdir('dataset/test_clothes')
with open('demo.txt', 'w') as pairs_file:
    pairs_file.writelines(f'input.png {name}\n' for name in cloth_names)

# Warping network: predicts the appearance flow used to align the cloth
# image with the person image. Loaded once at import time onto the GPU.
warp_model = AFWM("", 3)
warp_model.eval()
warp_model.cuda()
load_checkpoint(warp_model, 'checkpoints/PFAFN/warp_model_final.pth')

# Generator network: fuses person image, warped cloth, and warped mask
# (7 input channels) into the rendered try-on plus a composition mask
# (4 output channels).
gen_model = ResUnetGenerator(7, 4, 5, ngf=64, norm_layer=nn.BatchNorm2d)
gen_model.eval()
gen_model.cuda()
load_checkpoint(gen_model, 'checkpoints/PFAFN/gen_model_final.pth')
36
-
37
-
38
def save_cloth_transfers(image_bytes):
    """Run the PF-AFN try-on pipeline for one uploaded person image.

    The raw upload is written to 'dataset/test_img/input.png' (the file name
    every pair in demo.txt references), then each person/cloth pair yielded
    by the data loader is warped and rendered. For every pair a side-by-side
    image (person | cloth | try-on) is written to results/demo/PFAFN/<step>.jpg.

    Args:
        image_bytes: Raw bytes of the uploaded person image (any format
            PIL can decode).

    Returns:
        True once every pair has been processed.
    """
    opt_name = 'demo'
    opt_batchSize = 1

    # Persist the upload where the data loader expects the person image.
    image = Image.open(io.BytesIO(image_bytes))
    image.save('dataset/test_img/input.png')

    data_loader = CreateDataLoader(opt)
    dataset = data_loader.load_data()
    dataset_size = len(data_loader)

    start_epoch, epoch_iter = 1, 0
    total_steps = (start_epoch - 1) * dataset_size + epoch_iter
    step = 0

    for epoch in range(1, 2):
        for i, data in tqdm(enumerate(dataset, start=epoch_iter)):
            total_steps += opt_batchSize
            epoch_iter += opt_batchSize

            real_image = data['image']
            clothes = data['clothes']
            # Binarize the cloth mask. FIX: the deprecated alias `np.int`
            # was removed in NumPy 1.24 — use the builtin `int` instead.
            edge = data['edge']
            edge = torch.FloatTensor((edge.detach().numpy() > 0.5).astype(int))
            # Zero out everything outside the cloth mask.
            clothes = clothes * edge

            # Predict the appearance flow, then warp cloth and mask onto the
            # person with the same flow field.
            flow_out = warp_model(real_image.cuda(), clothes.cuda())
            warped_cloth, last_flow = flow_out
            warped_edge = F.grid_sample(edge.cuda(), last_flow.permute(0, 2, 3, 1),
                                        mode='bilinear', padding_mode='zeros')

            # Render the try-on: the generator emits an RGB image plus a
            # composition mask that blends warped cloth with the rendering.
            gen_inputs = torch.cat([real_image.cuda(), warped_cloth, warped_edge], 1)
            gen_outputs = gen_model(gen_inputs)
            p_rendered, m_composite = torch.split(gen_outputs, [3, 1], 1)
            p_rendered = torch.tanh(p_rendered)
            m_composite = torch.sigmoid(m_composite) * warped_edge
            p_tryon = warped_cloth * m_composite + p_rendered * (1 - m_composite)

            sub_path = os.path.join('results', opt_name, 'PFAFN')
            os.makedirs(sub_path, exist_ok=True)

            # Save person | cloth | try-on side by side. Model outputs are in
            # [-1, 1]; map to [0, 255] uint8 and convert RGB -> BGR for OpenCV.
            # (The original guarded this with `if step % 1 == 0`, which is
            # always true, so every step is saved unconditionally.)
            a = real_image.float().cuda()
            b = clothes.cuda()
            c = p_tryon
            combine = torch.cat([a[0], b[0], c[0]], 2).squeeze()
            cv_img = (combine.permute(1, 2, 0).detach().cpu().numpy() + 1) / 2
            rgb = (cv_img * 255).astype(np.uint8)
            bgr = cv2.cvtColor(rgb, cv2.COLOR_RGB2BGR)
            cv2.imwrite(os.path.join(sub_path, str(step) + '.jpg'), bgr)

            step += 1
            if epoch_iter >= dataset_size:
                break

    return True
102
-
103
-
104
@app.route('/predict', methods=['GET', 'POST'])
def predict():
    """Accept an uploaded person image and run the try-on pipeline.

    FIX: the original route registered only Flask's default GET method, so
    the POST branch below was unreachable — Flask answered every POST with
    405 before the view ran. Registering both methods makes POST work while
    GET keeps returning the usage hint, preserving the original GET behavior.

    Returns:
        JSON: {'status': True} after a successful POST run, or
        {'message': ...} for any non-POST request.
    """
    if request.method == 'POST':
        print('#' * 100)  # visual request marker in the server log
        uploaded = request.files['file']
        image_bytes = uploaded.read()
        save_cloth_transfers(image_bytes=image_bytes)
        return jsonify({'status': True})
    else:
        return jsonify({'message': "Only accept POST requests"})
114
-
115
-
116
# Launch the Flask development server when executed as a script
# (default host/port; not started when the module is merely imported).
if __name__ == '__main__':
    app.run()
118
-
119
-