package org.autoplot.hapi;

import java.awt.EventQueue;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.das2.datum.DatumRange;
import org.das2.datum.TimeParser;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.autoplot.datasource.AutoplotSettings;
import org.das2.datum.Datum;
import org.das2.datum.DatumRangeUtil;
import org.das2.datum.TimeUtil;
import org.das2.datum.Units;
import org.das2.util.filesystem.FileSystem;

/**
 * Utility methods for interacting with HAPI servers.  
 * @author jbf
 */
public class HapiServer {
    
    protected final static Logger logger= Logger.getLogger("apdss.hapi");
    
    /**
     * this logger is for opening connections to remote sites.
     */
    protected static final Logger loggerUrl= org.das2.util.LoggerManager.getLogger( "das2.url" );
    
    /**
     * get known servers.  
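     * <p>For example:</p>
     * <pre>{@code
     * for ( String server : HapiServer.getKnownServers() ) {
     *     System.out.println( server );
     * }
     * }</pre>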
     * @return known servers
     */
    public static List<String> getKnownServers() {
        ArrayList<String> result= new ArrayList<>();
        try {
            URL url= new URL("https://raw.githubusercontent.com/hapi-server/servers/master/server_list.txt");
            try {
                String s= readFromURL(url,"");
                String[] ss= s.split("\n");
                result.addAll(Arrays.asList(ss));
            } catch ( IOException ex ) {
                // fall back to the alternate name for the server list.
                url= new URL("https://raw.githubusercontent.com/hapi-server/servers/master/all.txt");
                String s= readFromURL(url,"");
                String[] ss= s.split("\n");
                result.addAll(Arrays.asList(ss));
            }
            if ( "true".equals(System.getProperty("hapiDeveloper","false")) ) {
                result.add("http://tsds.org/get/IMAGE/PT1M/hapi");
                result.add("https://cdaweb.gsfc.nasa.gov/registry/hdp/hapi");
                result.add("http://jfaden.net/HapiServerDemo/hapi");
            }
        } catch (IOException  ex) {
            logger.log( Level.SEVERE, ex.getMessage(), ex );
        }
        result.remove("http://datashop.elasticbeanstalk.com/hapi");
        result.add("http://datashop.elasticbeanstalk.com/hapi");
        
        ArrayList<String> uniq= new ArrayList<>();
        for ( String s: result ) {
            if ( !uniq.contains(s) ) uniq.add(s);
        }

        return uniq;
    }
    
    /**
     * get known servers
     * @return known servers
     */
    public static String[] getKnownServersArray() {
        List<String> result= getKnownServers();
        return result.toArray( new String[result.size()] );
    }
     
    /**
     * return the default known servers, plus the ones found in the user's history, as an array.
     * @return list of server URLs
     */
    public static String[] listHapiServersArray() {
        List<String> result= listHapiServers();
        return result.toArray( new String[result.size()] );        
    }
    
    /**
     * return the default known servers, plus the ones found in the user's history.  
     * The first element is the most recently used server.
     * This should not be called from the event thread.
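     * <p>A sketch of typical use, keeping the network call off the event thread:</p>
     * <pre>{@code
     * new Thread( new Runnable() {
     *     public void run() {
     *         List<String> servers= HapiServer.listHapiServers();
     *         // hand the result to the GUI on the event thread as needed.
     *     }
     * } ).start();
     * }</pre>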
     * 
     * @return list of server URLs.
     */
    public static List<String> listHapiServers() {
        if ( EventQueue.isDispatchThread() ) {
            logger.warning("HAPI network call on event thread");
        }        
        List<String> d2ss1= new ArrayList<>();
        d2ss1.addAll( getKnownServers() );

        File home = new File(AutoplotSettings.settings().resolveProperty(AutoplotSettings.PROP_AUTOPLOTDATA));
        File book = new File(home, "bookmarks");
        File hist = new File(book, "history.txt");
        long t0= System.currentTimeMillis();
        logger.log( Level.FINE, "reading recent datasources from {0}", hist.toString());

        if ( hist.exists() ) {
            BufferedReader r=null;
            try {
                String seek="hapi:";
                int ttaglen= 25;  // length of the time tag prefixing each history entry.
                r = new BufferedReader(new FileReader(hist));
                String s = r.readLine();
                LinkedHashSet<String> dss = new LinkedHashSet<>();

                while (s != null) {
                    if ( s.length()>ttaglen+15 && s.substring(ttaglen+4,ttaglen+9).equalsIgnoreCase(seek)) {
                        int i= s.indexOf("?");
                        if ( i==-1 ) i= s.length();
                        String key= s.substring(ttaglen+4+seek.length(),i);
                        if ( dss.contains(key) ) dss.remove( key ); // move to the end
                        dss.add( key );
                    }
                    s = r.readLine();
                }

                d2ss1.removeAll(dss);  // remove whatever we have already
                List<String> d2ssDiscoveryList= new ArrayList<>(dss);
                Collections.reverse( d2ssDiscoveryList );
                d2ssDiscoveryList.addAll(d2ss1);
                d2ss1= d2ssDiscoveryList; // put the most recently used ones at the front of the list
                
                logger.log( Level.FINE, "read extra hapi servers in {0} millis\n", (System.currentTimeMillis()-t0) );
            } catch ( IOException ex ) {
                logger.log( Level.FINE, "unable to read history file: {0}", ex.getMessage() );
            } finally {
                try {
                    if ( r!=null ) r.close();
                } catch (IOException ex) {
                    logger.log( Level.SEVERE, ex.getMessage(), ex );
                }
            }
        } else {
            logger.log( Level.FINE, "no history file found: {0}", hist );
        }
                
        return d2ss1;

    }
    
    /**
     * return the list of datasets available at the server.
     * This should not be called from the event thread.
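     * <p>For example:</p>
     * <pre>{@code
     * URL server= new URL( "http://jfaden.net/HapiServerDemo/hapi" );
     * for ( String id : HapiServer.getCatalogIds( server ) ) {
     *     System.out.println( id );
     * }
     * }</pre>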
     * @param server the root of the server, which should contain "catalog"
     * @return list of dataset ids
     * @throws java.io.IOException
     * @throws org.json.JSONException
     */
    public static List<String> getCatalogIds( URL server ) throws IOException, JSONException {
        if ( EventQueue.isDispatchThread() ) {
            logger.warning("HAPI network call on event thread");
        }        
        URL url= HapiServer.createURL( server, HapiSpec.CATALOG_URL );
        String s= readFromURL(url, "json");
        JSONObject o= new JSONObject(s);
        JSONArray catalog= o.getJSONArray( HapiSpec.CATALOG );
        List<String> result= new ArrayList<>(catalog.length());
        for ( int i=0; i<catalog.length(); i++ ) {
            result.add(i,catalog.getJSONObject(i).getString(HapiSpec.URL_PARAM_ID) );
        }
        return result;
    }
     
    /**
     * return the list of datasets available at the server.  
     * This should not be called from the event thread.
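     * <p>For example, listing the ids with their titles:</p>
     * <pre>{@code
     * JSONArray catalog= HapiServer.getCatalog( server );
     * for ( int i=0; i<catalog.length(); i++ ) {
     *     JSONObject entry= catalog.getJSONObject(i);
     *     System.out.println( entry.getString("id") + ": " + entry.optString("title","") );
     * }
     * }</pre>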
     * @param server the root of the server, which should contain "catalog"
     * @return list of catalog entries, which have "id" and "title" tags.
     * @throws java.io.IOException
     * @throws org.json.JSONException
     */
    public static JSONArray getCatalog( URL server ) throws IOException, JSONException {
        if ( EventQueue.isDispatchThread() ) {
            logger.warning("HAPI network call on event thread");
        }        
        URL url= HapiServer.createURL( server, HapiSpec.CATALOG_URL );
        String s= readFromURL(url, "json");
        JSONObject o= new JSONObject(s);
        JSONArray catalog= o.getJSONArray( HapiSpec.CATALOG );
        return catalog;
    }
    
    /**
     * return the URL for getting info.
     * @param server the HAPI server root URL.
     * @param id the dataset id.
     * @return the info URL.
     */
    public static URL getInfoURL( URL server, String id ) {
        URL url= HapiServer.createURL(server, HapiSpec.INFO_URL, Collections.singletonMap(HapiSpec.URL_PARAM_ID, id) );
        return url;
    }
    
    /**
     * return the URL for data requests.
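     * <p>A sketch of typical use; the id "data1" and parameter name "Temperature"
     * are hypothetical:</p>
     * <pre>{@code
     * DatumRange tr= DatumRangeUtil.parseTimeRange("2016-06-01");
     * URL url= HapiServer.getDataURL( server, "data1", tr, "Temperature" );
     * // roughly: <server>/data?id=data1&time.min=2016-06-01T00:00:00Z&time.max=2016-06-02T00:00:00Z&parameters=Temperature
     * }</pre>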
     * @param server the HAPI server root URL.
     * @param id the dataset id, like "data4" or "spase://..."
     * @param tr the time range
     * @param parameters zero-length, or a comma-separated list of parameters.
     * @return the request, with the ID and parameters URL encoded.
     */
    public static URL getDataURL( URL server, String id, DatumRange tr, String parameters ) {
        TimeParser tp= TimeParser.create("$Y-$m-$dT$H:$M:$SZ");
        Map<String,String> map= new LinkedHashMap<>();
        map.put(HapiSpec.URL_PARAM_ID, id );
        map.put(HapiSpec.URL_PARAM_TIMEMIN, tp.format(tr.min()) );
        map.put(HapiSpec.URL_PARAM_TIMEMAX, tp.format(tr.max()) );
        if ( parameters.length()>0 ) {
            map.put(HapiSpec.URL_PARAM_PARAMETERS, parameters );
        }
        URL serverUrl= createURL(server, HapiSpec.DATA_URL, map );
        return serverUrl;
    }
        
    /**
     * return the URL by appending the text to the end of the server URL.  This
     * avoids extra slashes, etc.
     * @param server the HAPI server root URL.
     * @param append the path to append, such as "catalog" or "info".
     * @return the combined URL.
     */
    public static URL createURL( URL server, String append ) {
        return createURL( server, append, null );
    }
    
    /**
     * encode the id for use within a URL, so that spaces and other special characters are escaped.
     * @param id the id to encode.
     * @return the URL-encoded id.
     */
    public static String urlEncode( String id ) {
        try {
            return URLEncoder.encode( id, "UTF-8" );
        } catch (UnsupportedEncodingException ex) {
            throw new IllegalArgumentException(ex);
        }
    }

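    /**
     * return the "parameters" node of the info response for the dataset, a
     * JSONArray with one JSONObject per parameter.
     * This should not be called from the event thread.
     * @param server the HAPI server root URL.
     * @param id the dataset id.
     * @return JSONArray of parameter descriptions.
     * @throws IOException
     * @throws JSONException
     */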
    public static JSONArray getParameters(URL server, String id) throws IOException, JSONException {
        JSONObject o= getInfo( server, id );
        JSONArray catalog= o.getJSONArray(HapiSpec.PARAMETERS);
        return catalog;
    }
    
    /**
     * return the info as a JSONObject.
     * This should not be called from the event thread.
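     * <p>For example (the id "data1" is hypothetical):</p>
     * <pre>{@code
     * JSONObject info= HapiServer.getInfo( server, "data1" );
     * System.out.println( info.getString("startDate") );
     * }</pre>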
     * @param server HAPI server.
     * @param id the dataset id.
     * @return JSONObject containing information.
     * @throws IOException
     * @throws JSONException 
     */
    public static JSONObject getInfo( URL server, String id) throws IOException, JSONException {
        if ( EventQueue.isDispatchThread() ) {
            logger.warning("HAPI network call on event thread");
        }
        URL url= HapiServer.createURL(server, HapiSpec.INFO_URL, Collections.singletonMap(HapiSpec.URL_PARAM_ID, id ) );
        logger.log(Level.FINE, "getInfo {0}", url.toString());
        String s= readFromURL(url, "json");
        JSONObject o= new JSONObject(s);
        return o;
    }
    
    /**
     * return the server capabilities document.  
     * This should not be called from the event thread.
     * @param server HAPI server.
     * @return JSONObject containing capabilities.
     * @throws IOException
     * @throws JSONException 
     */
    protected static JSONObject getCapabilities(URL server)  throws IOException, JSONException {
        if ( EventQueue.isDispatchThread() ) {
            logger.warning("HAPI network call on event thread");
        }
        URL url= HapiServer.createURL(server, HapiSpec.CAPABILITIES_URL);
        String s= readFromURL(url, "json" );
        JSONObject o= new JSONObject(s);
        return o;
    }

    /**
     * true if the cache of HAPI responses should be used, allowing for use in offline mode.
     * @return true if the cache should be used.
     */
    protected static boolean useCache() {
        return ( "true".equals( System.getProperty("hapiServerCache","false") ) );
    }
    
    /**
     * allow cached files to be used for no more than 1 hour.
     * @return the cache age limit in milliseconds.
     */
    protected static long cacheAgeLimitMillis() {
        return 3600000;
    }
    
    /**
     * return the resource, if cached, or null if the data is not cached.
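     * <p>A sketch of typical use:</p>
     * <pre>{@code
     * String cached= HapiServer.readFromCachedURL( url, "json" );
     * if ( cached!=null ) {
     *     // use the cached response instead of hitting the network.
     * }
     * }</pre>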
     * @param url the resource location.
     * @param type "json" (the extension) or "" for no additional extension.
     * @return the data or null.
     * @throws IOException 
     */
    public static String readFromCachedURL( URL url, String type ) throws IOException {
        String s= AutoplotSettings.settings().resolveProperty(AutoplotSettings.PROP_FSCACHE);
        if ( s.endsWith("/") ) s= s.substring(0,s.length()-1);
        String u= url.getProtocol() + "/" + url.getHost() + "/" + url.getPath();
        if ( url.getQuery()!=null ) {
            Pattern p= Pattern.compile("id=(.+)");
            Matcher m= p.matcher(url.getQuery());
            if ( m.matches() ) {
                u= u + "/" + m.group(1);
                if ( type.length()>0 ) u= u+ "." + type;
            } else {
                throw new IllegalArgumentException("query not supported, implementation error");
            }
        } else {
            if ( type.length()>0 ) u= u + "." + type;
        }
        String su= s + "/hapi/" + u;
        File f= new File(su);
        if ( f.exists() && f.canRead() ) {
            if ( ( System.currentTimeMillis() - f.lastModified() < cacheAgeLimitMillis() ) || FileSystem.settings().isOffline() ) {
                logger.log(Level.FINE, "read from hapi cache: {0}", url);
                String r= readFromFile( f );
                return r;
            } else {
                logger.log(Level.FINE, "old cache item will not be used: {0}", url);
                return null;
            }
        } else {
            return null;
        }
    }

    /**
     * write the data (for example, an info response) to a cache file.  This is called
     * from readFromURL to cache the data.
     * @param url the resource location, query param id is handled specially, but others are ignored.
     * @param type "json" (the extension), or "" if no extension should be added.
     * @param data the data.
     * @throws IOException 
     */
    public static void writeToCachedURL( URL url, String type, String data ) throws IOException {
        String s= AutoplotSettings.settings().resolveProperty(AutoplotSettings.PROP_FSCACHE);
        if ( s.endsWith("/") ) s= s.substring(0,s.length()-1);
        String u= url.getProtocol() + "/" + url.getHost() + "/" + url.getPath();
        if ( url.getQuery()!=null ) {
            Pattern p= Pattern.compile("id=(.+)");
            Matcher m= p.matcher(url.getQuery());
            if ( m.matches() ) {
                u= u + "/" + m.group(1);
                if ( type.length()>0 ) {
                    u= u + "." + type;
                }
            } else {
                throw new IllegalArgumentException("query not supported, implementation error");
            }
        } else {
            if ( type.length()>0 ) {
                u= u + "." + type;
            }
        }
        
        String su= s + "/hapi/" + u;
        File f= new File(su);
        if ( f.exists() && !f.delete() ) {
            throw new IOException("unable to delete existing cache file: "+f);
        }
        if ( !f.getParentFile().exists() ) {
            if ( !f.getParentFile().mkdirs() ) {
                throw new IOException("unable to make parent directories");
            }
        }
        if ( !f.exists() ) {
            logger.log(Level.FINE, "write to hapi cache: {0}", url);
            try ( BufferedWriter w= new BufferedWriter( new OutputStreamWriter( new FileOutputStream(f), "UTF-8" ) ) ) { // HAPI responses are UTF-8.
                w.write(data);
            }
        } else {
            throw new IOException("unable to write to file: "+f);
        }
    }
    
    private static final Lock lock= new ReentrantLock();
    
    /**
     * read the file into a string.  
     * @param f non-empty file
     * @return String containing file contents.
     * @throws IOException 
     */
    public static String readFromFile( File f ) throws IOException {
        StringBuilder builder= new StringBuilder();
        try ( BufferedReader in= new BufferedReader( new InputStreamReader( new FileInputStream(f), "UTF-8" ) ) ) {
            String line= in.readLine();
            while ( line!=null ) {
                builder.append(line);
                builder.append("\n");
                line= in.readLine();
            }
        }
        if ( builder.length()==0 ) {
            throw new IOException("file is empty:" + f );
        }
        String result=builder.toString();
        return result;
    }
    
    /**
     * read data from the URL.  
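     * <p>For example, to read the capabilities response:</p>
     * <pre>{@code
     * String json= HapiServer.readFromURL( HapiServer.createURL( server, "capabilities" ), "json" );
     * }</pre>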
     * @param url the URL to read from
     * @param type "json" (the extension) or "" for no additional extension.
     * @return non-empty string
     * @throws IOException 
     */
    public static String readFromURL( URL url, String type) throws IOException {
        
        if ( FileSystem.settings().isOffline() ) {
            String s= readFromCachedURL( url, type );
            if ( s!=null ) return s;
        }
        loggerUrl.log(Level.FINE, "GET {0}", new Object[] { url } );
        URLConnection urlc= url.openConnection();
        urlc.setConnectTimeout( FileSystem.settings().getConnectTimeoutMs() );
        urlc.setReadTimeout( FileSystem.settings().getReadTimeoutMs() );
        StringBuilder builder= new StringBuilder();
        try ( BufferedReader in= new BufferedReader( new InputStreamReader( urlc.getInputStream(), "UTF-8" ) ) ) { // HAPI responses are UTF-8.
            String line= in.readLine();
            while ( line!=null ) {
                builder.append(line);
                builder.append("\n");
                line= in.readLine();
            }
        } catch ( IOException ex ) {
            logger.log( Level.FINE, ex.getMessage(), ex );
            lock.lock();
            try {
                if ( useCache() ) {
                    String s= readFromCachedURL( url, type );
                    if ( s!=null ) return s;
                }
                throw ex; // no cached copy is available, so rethrow rather than mask the cause.
            } finally {
                lock.unlock();
            }
        }
        
        if ( builder.length()==0 ) {
            throw new IOException("empty response from "+url );
        }
        String result=builder.toString();
        
        lock.lock();
        try {
            if ( useCache() ) {
                writeToCachedURL( url, type, result );
            }
        } finally {
            lock.unlock();
        }
        return result;
    }
    
    /**
     * return the URL by appending the text to the end of the server URL.  This
     * avoids extra slashes, etc.
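     * <p>For example (the id "data1" is hypothetical):</p>
     * <pre>{@code
     * URL url= HapiServer.createURL( server, "info",
     *         Collections.singletonMap( "id", "data1" ) );
     * // -> <server>/info?id=data1
     * }</pre>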
     * @param server the hapi server
     * @param append the folder to append.
     * @param singletonMap parameters to append.
     * @return the url.
     */
    public static URL createURL(URL server, String append, Map<String, String> singletonMap) {
        StringBuilder s= new StringBuilder( server.toString() );
        if ( append.startsWith("/") ) {
            append= append.substring(1);
        }
        if ( s.substring(s.length()-1).equals("/") ) {
            s= s.append( append );
        } else {
            s= s.append("/").append( append );
        }
        if ( singletonMap!=null && !singletonMap.isEmpty() ) {
            boolean firstArg= true;
            for ( Entry<String,String> entry: singletonMap.entrySet() ) {
                if ( entry.getValue()!=null ) {
                    if ( firstArg ) {
                        s.append("?");
                        firstArg=false;
                    } else {
                        s.append("&");
                    }
                    String svalue;
                    if ( entry.getKey().equals(HapiSpec.URL_PARAM_TIMEMIN) || entry.getKey().equals(HapiSpec.URL_PARAM_TIMEMAX) ) {
                        svalue= entry.getValue();  // the colons are needed on CDAWeb server.
                    } else {
                        svalue= urlEncode( entry.getValue() );
                    }
                    s.append(entry.getKey()).append("=").append( svalue );
                }
            }
        }
        try {
            return new URL(s.toString());
        } catch ( MalformedURLException ex ) {
            throw new IllegalArgumentException(ex);
        }
    }
    
    /**
     * convert a decomposed duration [yr,mon,day,hour,min,sec,nanos] to an approximate duration in seconds.
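     * <p>For example, [0,0,0,1,0,0,0] (one hour) yields 3600 seconds.</p>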
     * @param array the decomposed duration.
     * @return the approximate duration, in seconds.
     */
    private static Datum cadenceArrayToDatum( int[] array ) {
        double seconds= array[6]/1e9;
        seconds+= array[5];
        seconds+= array[4]*60;
        seconds+= array[3]*3600;
        seconds+= array[2]*86400; //approx, just to get scale
        seconds+= array[1]*86400*30; //approx, just to get scale
        seconds+= array[0]*86400*365; // approx, just to get scale
        return Units.seconds.createDatum(seconds);
    }
    
    
    /**
     * return the range of available data. For example, Polar/Hydra data is available
     * from 1996-03-20 to 2008-04-15.  Note this supports old schemas.
     * @param info the JSON info response.
     * @return the range of available data, or null if it cannot be determined.
     */
    public static DatumRange getRange( JSONObject info ) {
        try {
            if ( info.has("firstDate") && info.has("lastDate") ) { // this is deprecated behavior
                String firstDate= info.getString("firstDate");
                String lastDate= info.getString("lastDate");
                if ( firstDate!=null && lastDate!=null ) {
                    Datum t1= Units.us2000.parse(firstDate);
                    Datum t2= Units.us2000.parse(lastDate);
                    if ( t1.le(t2) ) {
                        return new DatumRange( t1, t2 );
                    } else {
                        logger.warning( "firstDate and lastDate are out of order, ignoring.");
                    }
                }
            } else if ( info.has("startDate") ) { // note startDate is required.
                String startDate= info.getString("startDate");
				String stopDate;
				if ( info.has("stopDate") ) {
					stopDate= info.getString("stopDate");
				} else {
					stopDate= null;
				}
                if ( startDate!=null ) {
                    Datum t1= Units.us2000.parse(startDate);
                    Datum t2= Units.us2000.parse(stopDate);
                    if ( t1.le(t2) ) {
                        return new DatumRange( t1, t2 );
                    } else {
                        logger.warning( "firstDate and lastDate are out of order, ignoring.");
                    }
                }
			}
        } catch ( JSONException | ParseException ex ) {
            logger.log( Level.WARNING, ex.getMessage(), ex );
        }
        return null;
    }    

    /**
     * return a time range suitable for sampling the data, based on the info response.
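     * <p>For example (the id "data1" is hypothetical):</p>
     * <pre>{@code
     * DatumRange tr= HapiServer.getSampleTimeRange( HapiServer.getInfo( server, "data1" ) );
     * }</pre>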
     * @param info the JSON info response.
     * @return a sample time range.
     * @throws JSONException if the JSON is mis-formatted.
     */
    public static DatumRange getSampleTimeRange( JSONObject info ) throws JSONException {
        DatumRange range= getRange(info);
        if ( range==null ) {
            logger.warning("server is missing required startDate and stopDate parameters.");
            throw new IllegalArgumentException("here fail");
        } else {
            DatumRange sampleRange=null;
            if ( info.has("sampleStartDate") && info.has("sampleStopDate") ) {
                try {
                    sampleRange = new DatumRange( Units.us2000.parse(info.getString("sampleStartDate")), Units.us2000.parse(info.getString("sampleStopDate")) );
                } catch (JSONException | ParseException ex) {
                    logger.log(Level.SEVERE, null, ex);
                }
            } 
            if ( sampleRange==null ) {
                Datum cadence= Units.seconds.createDatum(60);  // assume default cadence of 1 minute results in 1 day sample range.
                if ( info.has("cadence") ) {
                    try{
                        int[] icadence= DatumRangeUtil.parseISO8601Duration(info.getString("cadence"));
                        cadence= cadenceArrayToDatum(icadence);
                    } catch ( ParseException ex ) {
                        logger.log(Level.WARNING, "parse error in cadence: {0}", info.getString("cadence"));
                    }
                }    
                // Note stopDate has been required since 2017-01-17, so the missing-stopDate
                // case no longer needs to be handled here.
                if ( cadence.ge(Units.days.createDatum(1)) ) {
                    Datum end = TimeUtil.nextMidnight(range.max());
                    end= end.subtract( 10,Units.days );
                    if ( range.max().subtract(end).ge( Datum.create(1,Units.days ) ) ) {
                        sampleRange = new DatumRange( end, end.add(10,Units.days) );
                    } else {
                        sampleRange = new DatumRange( end.subtract(10,Units.days), end );
                    }
                } else if ( cadence.ge(Units.seconds.createDatum(1)) ) {
                    Datum end = TimeUtil.prevMidnight(range.max());
                    if ( range.max().subtract(end).ge( Datum.create(1,Units.hours ) ) ) {
                        sampleRange = new DatumRange( end, end.add(1,Units.days) );
                    } else {
                        sampleRange = new DatumRange( end.subtract(1,Units.days), end );
                    }
                } else {
                    Datum end = TimeUtil.prev( TimeUtil.HOUR, range.max() );
                    if ( range.max().subtract(end).ge( Datum.create(1,Units.minutes ) ) ) {
                        sampleRange = new DatumRange( end, end.add(1,Units.hours) );
                    } else {
                        sampleRange = new DatumRange( end.subtract(1,Units.hours), end );
                    }
                }
                if ( !sampleRange.intersects(range) ) {
                    sampleRange= sampleRange.next();
                }
            }
            return sampleRange;                
        }
    }
}