A Filter to Block Malicious Page Refreshes

To keep users from refreshing pages on the site too frequently, we need to detect this kind of abusive behaviour and block it. The company wanted this feature, but I personally think it is largely unnecessary: if your servers are good enough, why would you need it? The full code is below (for reference only; I don't think it has much practical value):

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.f139.frame.util.NetUtil;

public class RefreshFilter implements Filter {

    private static final Map<String, Integer> ipcount = new ConcurrentHashMap<String, Integer>();

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        try {
            filter((HttpServletRequest) request, (HttpServletResponse) response, chain);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    private void filter(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
            throws IOException, ServletException, InterruptedException {
        response.setCharacterEncoding("UTF-8");
        request.setCharacterEncoding("UTF-8");
        // Use the client's IP address to decide whether it is refreshing too frequently.
        String userIP = NetUtil.getIpAddr(request);
        Cache cache = Cache.getInstance();
        cache.increment(userIP);
        if (cache.isUpCount(userIP)) {
            Integer count = ipcount.get(userIP);
            if (count != null) {
                ipcount.put(userIP, count + 1);
                System.out.println(ipcount.get(userIP)); // debug output
            } else {
                ipcount.put(userIP, 1);
            }
            if (ipcount.get(userIP) > 3) {
                // The client kept hammering the page while already blocked: answer with a 403 page.
                response.sendError(HttpServletResponse.SC_FORBIDDEN, "Sorry, you are refreshing too frequently.");
                /*
                 * A scheduled job (e.g. Quartz) could periodically clear this map
                 * so that blocked users eventually regain access to the page.
                 */
                return;
            }
            response.getWriter().println("Too many requests, please try again in 3 seconds.");
            return;
        }
        chain.doFilter(request, response);
    }

    @Override
    public void destroy() {
    }

    @Override
    public void init(FilterConfig config) throws ServletException {
    }

    private static class Cache {

        private static final ConcurrentHashMap<String, CopyOnWriteArrayList<Long>> map =
                new ConcurrentHashMap<String, CopyOnWriteArrayList<Long>>();
        // How long a recorded hit stays in the cache before it expires.
        private static final long EXPIRE_TIME = 1000 * 5L;
        // Maximum number of refreshes; the sixth one within the window is blocked.
        private static final int MAX_COUNT = 5;
        private static final Cache cache = new Cache();

        private Cache() {
            new Thread(new ClearCacheThread()).start();
        }

        public static Cache getInstance() {
            return cache;
        }

        // Record a timestamp for every refresh; the list length is the refresh count.
        public void increment(String key) {
            CopyOnWriteArrayList<Long> list = map.get(key);
            if (list == null) {
                map.putIfAbsent(key, new CopyOnWriteArrayList<Long>());
                list = map.get(key);
            }
            list.add(Long.valueOf(System.currentTimeMillis()));
        }

        // Has this key exceeded the allowed number of refreshes?
        public boolean isUpCount(String key) {
            CopyOnWriteArrayList<Long> list = map.get(key);
            if (list == null) {
                return false;
            }
            return list.size() > MAX_COUNT;
        }

        // Background thread that removes expired hit records.
        private static class ClearCacheThread implements Runnable {
            @Override
            public void run() {
                while (true) {
                    try {
                        // Sleep 0.6 s per pass so a blocked user who keeps refreshing stays blocked;
                        // after roughly 3 s (five expired records removed) refreshing is allowed again.
                        Thread.sleep(600);
                        // Drop every recorded timestamp that is older than EXPIRE_TIME.
                        for (String key : map.keySet()) {
                            CopyOnWriteArrayList<Long> list = map.get(key);
                            for (Long date : list) {
                                if (System.currentTimeMillis() - date > Cache.EXPIRE_TIME) {
                                    System.out.println(list.remove(date)); // debug output
                                }
                            }
                        }
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }
}
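The only non-JDK dependency above is NetUtil.getIpAddr from com.f139.frame.util, a project-internal helper. If you don't have that class, a rough stand-in might look like the sketch below (an assumption on my part: it follows the common pattern of checking proxy headers such as X-Forwarded-For before falling back to getRemoteAddr). Also remember that the filter still has to be mapped to a URL pattern, either in web.xml or with a Servlet 3.0 @WebFilter("/*") annotation on the class.

import javax.servlet.http.HttpServletRequest;

public class NetUtil {

    // Hypothetical replacement for com.f139.frame.util.NetUtil.getIpAddr:
    // prefer the address supplied by a reverse proxy, otherwise use the TCP peer address.
    public static String getIpAddr(HttpServletRequest request) {
        String[] headers = { "X-Forwarded-For", "Proxy-Client-IP", "WL-Proxy-Client-IP" };
        for (String header : headers) {
            String ip = request.getHeader(header);
            if (ip != null && ip.length() > 0 && !"unknown".equalsIgnoreCase(ip)) {
                // X-Forwarded-For may hold a chain "client, proxy1, proxy2"; take the first entry.
                return ip.split(",")[0].trim();
            }
        }
        return request.getRemoteAddr();
    }
}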
This code actually still has quite a few problems. For example, with a lot of users there can be a lot of threads hanging around, and too many threads will certainly hurt server performance. CopyOnWriteArrayList is not particularly fast either, but of all the list implementations I tried, only CopyOnWriteArrayList made the code above work. To be honest I don't fully understand how CopyOnWriteArrayList works internally, so if anyone has insight, comments and corrections are welcome.
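To illustrate why only CopyOnWriteArrayList worked for me: its iterator walks an immutable snapshot of the backing array, so the cleanup thread can remove entries while a loop is still iterating, whereas ArrayList's fail-fast iterator throws ConcurrentModificationException. A minimal, self-contained demonstration (the class name CowDemo exists only for this sketch):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

public class CowDemo {
    public static void main(String[] args) {
        // With ArrayList, removing while iterating fails fast.
        List<Long> plain = new ArrayList<Long>();
        plain.add(1L); plain.add(2L); plain.add(3L);
        try {
            for (Long value : plain) {
                plain.remove(value); // triggers ConcurrentModificationException on the next step
            }
        } catch (java.util.ConcurrentModificationException e) {
            System.out.println("ArrayList: " + e);
        }

        // CopyOnWriteArrayList iterates over an immutable snapshot,
        // so the cleanup thread in the filter can remove entries safely.
        List<Long> cow = new CopyOnWriteArrayList<Long>();
        cow.add(1L); cow.add(2L); cow.add(3L);
        for (Long value : cow) {
            cow.remove(value); // no exception; each write copies the backing array
        }
        System.out.println("CopyOnWriteArrayList after removal: " + cow);
    }
}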

Actually, the best way to implement this kind of protection is with the iptables firewall, but I have not tried that myself. If anyone has experience with it, please share.
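For reference, firewall-level rate limiting is usually done with the "recent" match module of iptables. I have not run this in production either, so treat the rules below purely as a sketch (port 80 and the 5-second/6-hit window mirror the constants in the filter above; adjust them to your own setup):

# Record every new connection to port 80 per source IP ...
iptables -A INPUT -p tcp --dport 80 -m state --state NEW -m recent --name WEB --set
# ... and drop it if the same IP opened more than 5 connections in the last 5 seconds.
iptables -A INPUT -p tcp --dport 80 -m state --state NEW -m recent --name WEB --update --seconds 5 --hitcount 6 -j DROP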