Update app.py
app.py
CHANGED
@@ -47,9 +47,34 @@ state_dict = torch.load(filepath, map_location="cpu")
 model.load_state_dict(state_dict)
 model = model.to(DEVICE).eval()

+def findNormals(gray_depth, format):
+
+    d_im = cv2.cvtColor(cv2.imread(gray_depth).astype(np.uint8), cv2.COLOR_BGR2GRAY)
+    zy, zx = np.gradient(d_im)
+    # You may also consider using Sobel to get a joint Gaussian smoothing and differentiation
+    # to reduce noise
+    #zx = cv2.Sobel(d_im, cv2.CV_64F, 1, 0, ksize=5)
+    #zy = cv2.Sobel(d_im, cv2.CV_64F, 0, 1, ksize=5)
+
+    if format == "opengl":
+        zy = -zy
+
+    normal = np.dstack((np.ones_like(d_im), -zy, -zx))
+    n = np.linalg.norm(normal, axis=2)
+    normal[:, :, 0] /= n
+    normal[:, :, 1] /= n
+    normal[:, :, 2] /= n
+
+    # offset and rescale values to be in 0-255
+    normal += 1
+    normal /= 2
+    normal *= 255
+
+    return (normal[:, :, ::-1]).astype(np.uint8)
+
 title = "# Depth Anything V2"
-description = """
-Please refer to
+description = """Unofficial demo for **Depth Anything V2**.
+Please refer to their [paper](https://arxiv.org/abs/2406.09414), [project page](https://depth-anything-v2.github.io), and [github](https://github.com/DepthAnything/Depth-Anything-V2) for more details."""

 @spaces.GPU
 def predict_depth(image):
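The new findNormals helper turns a saved grayscale depth map into a normal map by treating the image gradient as the surface slope: per pixel it stacks (1, -zy, -zx), normalizes to unit length, and remaps the result from [-1, 1] to [0, 255]. Below is a minimal standalone sketch of the same idea, separate from the commit; the function name, the synthetic ramp input, and the output filename are illustrative only.

# Standalone sketch of the gradient-to-normals idea behind findNormals.
# Not part of the commit: names and the synthetic input are made up here.
import cv2
import numpy as np

def normals_from_gray_depth(d_im, opengl=False):
    d_im = d_im.astype(np.float64)
    zy, zx = np.gradient(d_im)          # slope of the depth surface along y and x
    if opengl:
        zy = -zy                        # OpenGL-style maps flip the Y (green) component
    normal = np.dstack((np.ones_like(d_im), -zy, -zx))
    normal /= np.linalg.norm(normal, axis=2, keepdims=True)   # unit-length per pixel
    # remap from [-1, 1] to [0, 255] and reverse the channel order, as in the commit
    return ((normal[:, :, ::-1] + 1.0) * 0.5 * 255.0).astype(np.uint8)

# Synthetic depth ramp: depth increases smoothly from left to right.
ramp = np.tile(np.linspace(0, 255, 256), (256, 1))
normal_map = normals_from_gray_depth(ramp)
assert normal_map.shape == (256, 256, 3) and normal_map.dtype == np.uint8
cv2.imwrite("normals_preview.png", normal_map)   # quick visual check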
@@ -61,12 +86,18 @@ with gr.Blocks(css=css) as demo:
     gr.Markdown("### Depth Prediction demo")

     with gr.Row():
-        input_image = gr.
-
-
-
+        input_image = gr.ImageEditor(label="Input Image", layers=True, sources=('upload', 'clipboard'), show_download_button=True, type="numpy", interactive=True, transforms=(None,), eraser=gr.Eraser(), brush=gr.Brush(default_size=1, colors=['black', '#505050', '#a0a0a0', 'white']), elem_id="img-display-input")
+        with gr.Tab("Depth"):
+            depth_image_slider = ImageSlider(label="Depth Map with Slider View", elem_id='img-display-output', position=0.5)
+            gray_depth_file = gr.File(label="Grayscale depth map", elem_id="download",)
+            submit = gr.Button(value="Compute Depth")
+        with gr.Tab("Normals"):
+            normals_out = gr.Image(label="Normal map", interactive=False)
+            format_normals = gr.Radio(choices=["directx", "opengl"])
+            find_normals = gr.Button("Find normals")
+            find_normals.click(fn=findNormals, inputs=[gray_depth_file, format_normals], outputs=[normals_out])
+
     raw_file = gr.File(label="16-bit raw output (can be considered as disparity)", elem_id="download",)
-
     cmap = matplotlib.colormaps.get_cmap('Spectral_r')

     def on_submit(image):
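Replacing the plain image input with gr.ImageEditor also changes what the event handlers receive: in recent Gradio releases an ImageEditor with type="numpy" passes the handler an editor value holding the background, the drawn layers, and a flattened composite rather than a bare array. A hedged sketch of how a handler such as on_submit might unpack that value follows; the helper name is made up, and the exact value layout should be checked against the installed Gradio version.

import numpy as np

def image_from_editor(editor_value):
    """Best-effort extraction of a plain HxWxC array from a gr.ImageEditor value.

    Assumes the dict-style editor value of recent Gradio releases; otherwise
    treats the value as a plain image array.
    """
    if isinstance(editor_value, dict):
        # Prefer the flattened composite (background + drawn layers); fall back
        # to the untouched background if no composite is present.
        image = editor_value.get("composite")
        if image is None:
            image = editor_value.get("background")
    else:
        image = editor_value
    return np.asarray(image)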
@@ -92,9 +123,9 @@ with gr.Blocks(css=css) as demo:

     submit.click(on_submit, inputs=[input_image], outputs=[depth_image_slider, gray_depth_file, raw_file])

-    example_files = os.listdir('assets/
+    example_files = os.listdir('assets/drawn_examples')
     example_files.sort()
-    example_files = [os.path.join('assets/
+    example_files = [os.path.join('assets/drawn_examples', filename) for filename in example_files]
     examples = gr.Examples(examples=example_files, inputs=[input_image], outputs=[depth_image_slider, gray_depth_file, raw_file], fn=on_submit)
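The example list is now built from assets/drawn_examples with a plain os.listdir. As a side note, here is a sketch of the same listing with an extension filter, in case the directory ever holds non-image files; the extension set is an assumption, not something the commit enforces.

import os

EXAMPLE_DIR = 'assets/drawn_examples'              # directory used by the commit
IMAGE_EXTS = ('.png', '.jpg', '.jpeg', '.webp')    # assumed extensions, adjust as needed

example_files = sorted(
    os.path.join(EXAMPLE_DIR, name)
    for name in os.listdir(EXAMPLE_DIR)
    if name.lower().endswith(IMAGE_EXTS)
)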