gagndeep committed
Commit 2b7cea2 · 1 Parent(s): 46d271d
app.py CHANGED
@@ -1,4 +1,4 @@
-"""SHARP Gradio demo (Modern, Single-Page UI).
+"""SHARP Gradio demo (Full-Width UI).
 
 This Space:
 - Runs Apple's SHARP model to predict a 3D Gaussian scene from a single image.
@@ -8,6 +8,10 @@ This Space:
 
 from __future__ import annotations
 
+import warnings
+# Suppress the internal torch.distributed warning from ZeroGPU wrappers
+warnings.filterwarnings("ignore", category=FutureWarning, module="torch.distributed")
+
 import json
 from pathlib import Path
 from typing import Final
@@ -29,22 +33,25 @@ EXAMPLES_DIR: Final[Path] = ASSETS_DIR / "examples"
 # Valid image extensions for discovery
 IMAGE_EXTS: Final[tuple[str, ...]] = (".png", ".jpg", ".jpeg", ".webp")
 
-# CSS for a responsive, contained layout
+# CSS for a fluid, full-width layout
 CSS: Final[str] = """
 .gradio-container {
-    max-width: 1400px !important;
+    max-width: 95% !important; /* Fill 95% of the screen width */
     margin: 0 auto;
 }
-/* constrain media height so it doesn't take up the whole screen */
+
+/* Constrain media height so it doesn't overflow vertically on huge screens */
 #input-image img, #output-video video {
-    max-height: 500px;
+    max-height: 65vh; /* Use Viewport Height units for better scaling */
     width: 100%;
     object-fit: contain;
 }
-/* Make the generate button pop slightly */
+
+/* Make the generate button prominent */
 #run-btn {
-    font-size: 1.1em;
+    font-size: 1.2rem;
     font-weight: bold;
+    margin-top: 1rem;
 }
 """
 
@@ -59,7 +66,6 @@ def _ensure_dir(path: Path) -> Path:
 def get_example_files() -> list[list[str]]:
     """
     Scans assets/examples for images to populate the gr.Examples component.
-    Returns a list of lists: [['path/to/img1.jpg'], ['path/to/img2.png']]
     """
     _ensure_dir(EXAMPLES_DIR)
 
@@ -102,7 +108,6 @@ def run_sharp(
     if not image_path:
         raise gr.Error("Please upload or select an input image first.")
 
-    # Validate output resolution
     out_long_side_val: int | None = (
         None if int(output_long_side) <= 0 else int(output_long_side)
     )
@@ -110,8 +115,7 @@ def run_sharp(
     try:
         progress(0.1, desc="Initializing model...")
 
-        # Convert string dropdown back to Enum if needed, or pass string if model accepts it
-        # Assuming model_utils handles string conversion or we map it here:
+        # Convert string dropdown back to Enum if needed
        traj_enum = TrajectoryType[trajectory_type.upper()] if hasattr(TrajectoryType, trajectory_type.upper()) else trajectory_type
 
        progress(0.3, desc="Predicting Gaussians...")
@@ -147,10 +151,10 @@ def run_sharp(
 # -----------------------------------------------------------------------------
 
 def build_demo() -> gr.Blocks:
-    # Use the Default theme for a clean, modern look
+    # Use Default theme
     theme = gr.themes.Default()
 
-    with gr.Blocks(theme=theme, css=CSS, title="SHARP 3D") as demo:
+    with gr.Blocks(theme=theme, css=CSS, title="SHARP 3D", fill_width=True) as demo:
 
         # --- Header ---
         with gr.Row():
@@ -163,26 +167,25 @@ def build_demo() -> gr.Blocks:
             )
 
         # --- Main Interface ---
-        with gr.Row():
+        with gr.Row(equal_height=False):
 
-            # --- Left Column: Input & Controls ---
-            with gr.Column(scale=1):
+            # --- Left Column: Input ---
+            with gr.Column(scale=1, min_width=500):
                 image_in = gr.Image(
                     label="Input Image",
                     type="filepath",
                     sources=["upload", "clipboard"],
                     elem_id="input-image",
-                    height=400
+                    height=None # Handled by CSS
                 )
 
-                # Collapsible Advanced Settings for a cleaner UI
+                # Collapsible Advanced Settings
                 with gr.Accordion("⚙️ Advanced Configuration", open=False):
                     with gr.Row():
                         trajectory = gr.Dropdown(
                             label="Camera Trajectory",
                             choices=["swipe", "shake", "rotate", "rotate_forward"],
                             value="rotate_forward",
-                            info="Camera movement for video preview"
                         )
                         output_res = gr.Dropdown(
                             label="Resolution (Long Side)",
@@ -203,12 +206,12 @@ def build_demo() -> gr.Blocks:
                 run_btn = gr.Button("✨ Generate 3D Scene", variant="primary", elem_id="run-btn")
 
             # --- Right Column: Output ---
-            with gr.Column(scale=1):
+            with gr.Column(scale=1, min_width=500):
                 video_out = gr.Video(
                     label="Preview Trajectory",
                     elem_id="output-video",
                     autoplay=True,
-                    height=400
+                    height=None # Handled by CSS
                 )
                 with gr.Group():
                     ply_download = gr.DownloadButton(
@@ -218,7 +221,6 @@
                 status_md = gr.Markdown("Ready to run.")
 
         # --- Footer: Examples ---
-        # Standard Gradio Examples component
        example_files = get_example_files()
        if example_files:
            gr.Examples(
@@ -243,21 +245,10 @@
                concurrency_limit=1
            )
 
-        # --- Citation ---
        with gr.Accordion("About & Citation", open=False):
            gr.Markdown(
                """
                **SHARP: Sharp Monocular View Synthesis in Less Than a Second** (Apple, 2025).
-
-                If you use this model, please cite:
-                ```bibtex
-                @inproceedings{Sharp2025:arxiv,
-                    title = {Sharp Monocular View Synthesis in Less Than a Second},
-                    author = {Mescheder, Dong, Li, Bai, et al.},
-                    year = {2025},
-                    journal = {arXiv preprint arXiv:2512.10685}
-                }
-                ```
                """
            )
 
@@ -271,4 +262,5 @@ _ensure_dir(OUTPUTS_DIR)
 
 if __name__ == "__main__":
     demo = build_demo()
+    # allowed_paths needed so Gradio can serve files from the assets directory
     demo.queue().launch(allowed_paths=[str(ASSETS_DIR)])
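The layout change in this commit comes down to two pieces: the `fill_width=True` flag on `gr.Blocks` and the `max-width: 95%` / `max-height: 65vh` rules in `CSS`. The snippet below is a minimal sketch of that pattern in isolation, assuming a Gradio version that supports `fill_width` (as the Space does); the components, labels, and `left-media` / `right-media` elem_ids are illustrative placeholders, not the Space's actual SHARP wiring.

```python
# Minimal sketch of the full-width layout pattern used in this commit.
# Placeholder components only; elem_ids here are hypothetical, not from app.py.
import gradio as gr

SKETCH_CSS = """
.gradio-container {
    max-width: 95% !important;  /* fill most of the screen width */
    margin: 0 auto;
}
#left-media img, #right-media video {
    max-height: 65vh;  /* scale with the viewport instead of a fixed pixel height */
    width: 100%;
    object-fit: contain;
}
"""

with gr.Blocks(css=SKETCH_CSS, fill_width=True) as sketch:
    with gr.Row(equal_height=False):
        with gr.Column(scale=1, min_width=500):
            gr.Image(label="Input", elem_id="left-media", height=None)
        with gr.Column(scale=1, min_width=500):
            gr.Video(label="Output", elem_id="right-media", height=None)

if __name__ == "__main__":
    sketch.launch()
```

With the heights left to CSS, the two columns stay side by side down to their 500px minimum width and the media scales with the viewport rather than being pinned to 400px.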
assets/examples/WildRGBD_TV_scene_000_00028_0000-0002.jpg ADDED

Git LFS Details
• SHA256: c1c49dbd483136e5b5dcb4cb3f65c46d72773c3f1af6b812dd0cfeb2f21bd0a6
• Pointer size: 130 Bytes
• Size of remote file: 51.2 kB