This is a bash script to leech a whole series or a particular chapter from
www.mangafox.com. I wrote this script for my niece; hope it helps you too. Using it is simple: the 1st argument is the URL of a particular manga (or just the manga's name) and the 2nd (optional) argument is the chapter in that manga.
** This code comes with no warranty whatsoever. I'm not responsible if it breaks anybody's computer. Use it at your own risk. You have been warned!!! **
Example:
mangafox.sh http://www.mangafox.com/manga/school_rumble/ 282
or
mangafox.sh School_Rumble 282
or
mangafox.sh http://www.mangafox.com/manga/school_rumble/
or
mangafox.sh School_Rumble
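
As a quick illustration of how the first argument is handled: the script simply keeps the last non-empty path component, which is why both a full URL and a bare name work. A minimal sketch of that step follows (the arg variable is only for demonstration; the script itself reads $1):

arg="http://www.mangafox.com/manga/school_rumble/"
manga_name=$(echo "$arg" | awk -F '/' '{for(i=NF;i>=0;i--){if(length($i)>0){print $i;break;}}}')
echo "$manga_name"   # prints: school_rumble
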
#!/bin/bash

RED='\e[1;31m'
CYAN='\e[1;36m'
NC='\e[0m' # No Color
YLW='\e[1;33m'
WHITE='\e[0;37m'

main_url="http://www.mangafox.com/manga"
wget_param="--tries=10 --retry-connrefused"

## usage
if [ -z "$1" ]
then
    echo
    echo -e "${CYAN}[*]${RED} usage: $(basename "$0") manga_url [chapter]${NC}"
    echo
    exit 1
else
    # the last non-empty path component works for both a full URL and a bare manga name
    manga_name=$(echo "$1" | awk -F '/' '{for(i=NF;i>=0;i--){if(length($i)>0){print $i;break;}}}')
    if [ -n "$2" ]
    then
        specific_chapter="$2"
    fi
fi
##

function find_chapters()
{
    TMP="${manga_name}_find_chapters.tmp"

    echo -n -e "${CYAN}[*]${RED} Finding total chapters in ${CYAN} $manga_name ${NC}= "
    wget $wget_param -q -nv "${main_url}/${manga_name}/?no_warning=1" -O "$TMP"
    # grab the chapter links from the listing page and strip the leading /manga/
    chapters=$(grep -e 'class="chico">' "$TMP" | grep -v -e '</td>' -e '#listing' | awk -F '"' '{print $2}' | sed 's/^\/manga\///g')

    # create one directory per chapter and count them
    count=0
    for c in $chapters
    do
        mkdir -p "./$c"
        #echo $c ##debug
        count=$((count+1))
    done
    echo -e "${CYAN}${count}${NC}"
}


function parse_chapter_n_download()
{
    PAGES="pages.tmp"
    PAGE="page_html.tmp"

    for chapter in $chapters
    do
        pwd=$(pwd)

        # if a specific chapter was requested, download only that one
        if [ "$specific_chapter" ]
        then
            mkdir -p "$specific_chapter" 2>/dev/null
            chapter=$specific_chapter
        fi

        cd "./$chapter" || continue

        ## find total number of pages in chapter
        echo -n -e "${CYAN}[*]${RED} Total pages in ${CYAN} $chapter ${NC}= "
        wget -q -nv $wget_param "$main_url/$chapter" -O $PAGES
        # the page-selector <select> on the chapter page lists every page number
        pages=$(grep '^.*<option value=.*<\/select>.*$' -m1 $PAGES | awk '{for(i=1;i<=NF;i++){if($(i-1)~"value"){print $i}}}' | sed -e 's/selected//g;s/option//g;s/[<>\/"=]//g;')

        n=0
        for k in $pages
        do
            #echo $k ##debug
            n=$((n+1))
        done
        echo -e "${CYAN}$n${NC}"

        ## now I have a list of (1,2,3...).html pages
        for p in $pages
        do
            wget $wget_param -q -nv "$main_url/$chapter/${p}.html" -O $PAGE
            # the full-size image URL sits in the tag carrying onclick="return enlarge();"
            img_url=$(grep 'onclick="return enlarge();' $PAGE | awk '{for(i=1;i<=NF;i++){if($i~"http://"){print $i}}}' | sed 's/src=//g;s/["=]//g')
            img=$(echo "$img_url" | awk -F '/' '{print $NF}')
            echo -e -n "${CYAN}>>${WHITE} $img_url ${RED} ... ${NC}"
            wget $wget_param -q -nv "$img_url"
            if [ -e "$img" ]
            then
                echo -e "${CYAN}[done]${NC}"
            else
                echo -e "${YLW}[failed]${NC}"
            fi
        done

        cd "$pwd"

        # when only one chapter was requested we are done
        if [ "$specific_chapter" ]
        then
            exit
        fi
    done
}


function main()
{
    # remove any previous download of this manga, then rebuild the chapter list and download everything
    rm -rf "${manga_name}"
    find_chapters
    parse_chapter_n_download
}

main