@@ -251,54 +251,6 @@ def test_get_markdownify_status_real():
     assert "request_id" in status_response
 
 
-
-
-
-def test_crawl_with_all_params_real():
-    """Test crawl with all parameters"""
-    if not os.getenv("SGAI_API_KEY"):
-        pytest.skip("SGAI_API_KEY not set")
-
-    data_schema = {
-        "type": "object",
-        "properties": {
-            "title": {"type": "string"},
-            "description": {"type": "string"}
-        }
-    }
-
-    with Client.from_env() as client:
-        response = client.crawl(
-            url="https://example.com",
-            prompt="Extract comprehensive page data",
-            data_schema=data_schema,
-            cache_website=True,
-            depth=1,
-            max_pages=1,
-            same_domain_only=True,
-            batch_size=5
-        )
-
-    """Test getting crawl status"""
-    if not os.getenv("SGAI_API_KEY"):
-        pytest.skip("SGAI_API_KEY not set")
-
-    with Client.from_env() as client:
-        # First create a crawl request
-        data_schema = {
-            "type": "object",
-            "properties": {
-                "title": {"type": "string"}
-            }
-        }
-
-        initial_response = client.crawl(
-            url="https://example.com",
-            prompt="Extract page titles",
-            data_schema=data_schema
-        )
-        assert "task_id" in initial_response
-
     """Test submitting feedback without text"""
     if not os.getenv("SGAI_API_KEY"):
         pytest.skip("SGAI_API_KEY not set")