Tuesday, December 15, 2009

OneManga.com leecher

This one leeches manga from www.onemanga.com. Ripping a single chapter is not implemented; the script only leeches an entire series, given the series URL as its argument.

** This code comes with no warranty whatsoever. I'm not responsible if it breaks anybody's computer. Use it at your own risk. You have been warned !!! **

Example:
onemanga.sh http://www.onemanga.com/School_Rumble/

    1 #!/bin/bash
2
3 RED='\e[0;31m'
4 CYAN='\e[0;36m'
5 NC='\e[0m' # No Color
6
7 if [ -z $1 ]
8 then
9 echo
10 echo "[*] usage: `basename $0` manga_url"
11 echo
12 exit
13 else
14 manga_name=`echo $1 | awk -F '/' '{for(i=NF;i>=0;i--){if(length($i)>0){
15 print $i;break;}}}'`
16 fi
17
18 main_url="http://www.onemanga.com"
19
20 rm -rf ${manga_name}
21
22 ##finding list of chapters
23 echo -n -e "${CYAN}[*]${RED} Finding total chapters in ${CYAN} $manga_name
24 ${NC}= "
25 wget -q -nv ${main_url}/${manga_name} -O tmp.txt
26 chapters=`cat tmp.txt | grep '<td class="ch-subject"><a href="/' | awk -F
27 '"' '{print $4}'`
28
29
30 count=0
31 for c in $chapters
32 do
33 mkdir -p ./$c
34 count=$((count+1))
35 done
36 echo -e "${CYAN}${count}${NC}"
37 ##
38
39 ##parse chapter and download
40 for chapter in $chapters
41 do
42 pwd=`pwd`
43
44 cd ./$chapter
45
46 ## initial wget
47 echo -e "${CYAN}[*]${RED} Trying to find the image base url${NC}"
48
49 ## find collect the first page in the chapter
50 wget -q -nv $main_url/$chapter -O tmp.txt
51 page=`cat tmp.txt | grep "Begin reading" | awk -F '"' '{print $2}'`
52
53 ## now go to that page & find image base
54 wget -q -nv ${main_url}${page} -O tmp.txt 2>/dev/null
55 image=`cat tmp.txt | grep "img_url" | awk -F '"' '{for(i=1;i<NF;i++){if($i
56 ~ "jpg"){print $i}}}' | awk -F '/' '{print $NF}'`
57 image_base=`cat tmp.txt | grep "img_url" | awk -F '"' '{for(i=1;i<NF;i++){
58 if($i ~ "jpg"){print $i}}}' | sed s/"$image"//g`
59 echo -e "${RED}>>${NC} $image_base"
60
61 ## download
62 d=$((d+1))
63 names=`cat tmp.txt | awk '{for(i=1;i<=NF;i++){if($i ~ "selected")go++}{if(
64 go>1){print}}}' | grep -e "</option>" -e 'credits</option>' -e 'extra*<
65 /option>' -e 'cover*</option>' | awk -F '"' '{print $2}'`
66
67 n=0
68 for k in $names
69 do
70 n=$((n+1))
71 done
72
73 echo -e "${CYAN}[*]${RED} Downloading ${CYAN}$n ${RED}images from chapter
74 ${CYAN}$chapter ${RED} ... ##${CYAN}$((count-d+1))${RED}##${CYAN}$count${
75 RED}##${NC}"
76 for k in $names
77 do
78 #echo -e "${RED}>>${NC} ${image_base}${k}.jpg"
79 wget -nv "${image_base}${k}.jpg"
80 done
81
82 cd $pwd
83 done
84 ##