@@ -341,41 +341,40 @@ def parse_description(element):
341
341
342
342
# Iterate through the found versions: each build is rendered as an
# <li class="os"> element anywhere in the page (the old code scoped the
# search to div.page-footer-main-text and used span.name; the markup now
# uses div.name, so we search the whole soup).
# NOTE(review): relies on `soup` (a BeautifulSoup document), `clean` (a
# whitespace/URL normalizer) and `parse_description` (parses the <small>
# build-description text into a dict with keys "date", "hash", "size",
# "type", "name") — all defined elsewhere in this file; confirm there.
results = []
for li in soup.find_all("li", class_="os"):
    # Raw fields pulled out of the list item's sub-elements.
    description_element = li.find("div", class_="name").find("small")
    # recursive=False: take only the element's own text node, not the
    # text of nested children (e.g. the <small> description).
    arch = li.find("span", class_="build").find(text=True, recursive=False)
    channel = li.find("span", class_="build-var").text
    name = li.find("div", class_="name").find(text=True, recursive=False)
    url = li.find("a", href=True)["href"]

    # Skip entries whose description text doesn't parse (malformed rows).
    description_data = parse_description(description_element)
    if description_data is None:
        continue

    info = {}
    info["arch"] = clean(arch)
    info["build_date"] = description_data["date"]
    info["channel"] = clean(channel)
    # Last path segment of the download URL is the file name.
    info["filename"] = clean(url).split("/")[-1]
    info["hash"] = description_data["hash"]
    info["name"] = clean(name) + " " + clean(channel)
    info["size"] = description_data["size"]
    info["type"] = description_data["type"]
    info["url"] = clean(url)
    # Version string combines the parsed build name with its hash so that
    # builds of the same name from different commits stay distinct.
    info["version"] = (
        description_data["name"] + "_" + description_data["hash"]
    )

    # Set "os" based on URL
    if "windows" in clean(url):
        info["os"] = "windows"
    elif "darwin" in clean(url):
        info["os"] = "osx"
    else:
        # Anything that is neither Windows nor macOS is assumed Linux.
        info["os"] = "linux"

    results.append(info)

finallist = results
381
380
0 commit comments