A Basic Web Crawler Prototype

Three approaches are shown below: reading the page as a raw byte stream, reading it line by line through a character-conversion stream, and saving the page to a local HTML file. The first two are left commented out; only the third one runs.

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.MalformedURLException;
import java.net.URL;

/**
 * Basic prototype of a web crawler
 * @author Administrator
 */
public class urlreptile {
    public static void main(String[] args) {
        /*
        // Approach 1: read the raw byte stream and print it
        try {
            URL url = new URL("https://www.baidu.com");
            InputStream input = url.openStream();
            byte[] flush = new byte[1024];
            int len = 0;
            while (-1 != (len = input.read(flush))) {
                System.out.println(new String(flush, 0, len));
            }
            input.close();
        } catch (MalformedURLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        */

        /*
        // Approach 2: conversion stream -- InputStreamReader takes the input stream plus a charset
        try {
            URL url = new URL("https://www.baidu.com");
            BufferedReader bs = new BufferedReader(new InputStreamReader(url.openStream(), "utf-8"));
            String msg = null;
            while (null != (msg = bs.readLine())) {
                System.out.println(msg);
            }
        } catch (MalformedURLException e) {
            System.out.println("Domain name resolution error");
            e.printStackTrace();
        } catch (IOException e) {
            System.out.println("I/O stream error");
            e.printStackTrace();
        }
        */

        // Approach 3: conversion streams -- both OutputStreamWriter and InputStreamReader accept a charset;
        // read the page line by line and save it to a local file
        try {
            URL url = new URL("https://www.baidu.com");
            BufferedReader bs = new BufferedReader(new InputStreamReader(url.openStream(), "utf-8"));
            BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream("baidu.html"), "utf-8"));
            String msg = null;
            while (null != (msg = bs.readLine())) {
                bw.append(msg);
                bw.newLine();
            }
            bw.flush();
            bw.close();
            bs.close();
        } catch (MalformedURLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
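In practice, some sites reject requests that carry no User-Agent header, and a plain url.openStream() call can hang indefinitely on a slow server. The sketch below is a minimal variant of the same line-by-line download, assuming the standard java.net.HttpURLConnection API; the class name, the example User-Agent string, and the 5-second timeouts are illustrative choices, not part of the original post.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class UrlReptileWithHeaders {
    public static void main(String[] args) {
        try {
            URL url = new URL("https://www.baidu.com");
            // Open the connection explicitly so request headers and timeouts can be set
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestProperty("User-Agent", "Mozilla/5.0"); // example value, adjust as needed
            conn.setConnectTimeout(5000); // fail fast if the server cannot be reached
            conn.setReadTimeout(5000);    // fail fast if the server stops responding
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), "utf-8"));
            String line;
            while (null != (line = reader.readLine())) {
                System.out.println(line);
            }
            reader.close();
            conn.disconnect();
        } catch (IOException e) { // MalformedURLException is a subclass of IOException
            e.printStackTrace();
        }
    }
}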