Mirror of https://gitlab.com/oeffi/public-transport-enabler.git (synced 2025-07-07 16:48:49 +00:00)
handle HASESSIONID cookie
git-svn-id: https://public-transport-enabler.googlecode.com/svn/trunk@668 0924bc21-9374-b0fa-ee44-9ff1593b38f0
parent 1cf6a3e0e0
commit 179675506d

4 changed files with 32 additions and 10 deletions
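The substance of the change is in ParserUtils (third file in the diff): Set-Cookie values starting with NSC_ or HASESSIONID are remembered in the static stateCookie field and replayed on later requests when cookie handling is enabled; the EFA call sites switch it on (true), the HAFAS call sites leave it off (false). As a quick, self-contained illustration of how the stored value is derived, here is the split expression from the diff applied to an invented header value (the cookie value is made up for the example):

public class CookieSplitDemo
{
    public static void main(final String[] args)
    {
        // Invented Set-Cookie value of the kind matched in the diff below.
        final String value = "HASESSIONID=0123456789abcdef; Path=/; HttpOnly";

        // Same expression as in ParserUtils: keep only the name=value pair,
        // dropping attributes such as Path or HttpOnly.
        final String stateCookie = value.split(";", 2)[0];

        System.out.println(stateCookie); // prints HASESSIONID=0123456789abcdef
    }
}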
AbstractEfaProvider.java

@@ -1396,7 +1396,7 @@ public abstract class AbstractEfaProvider implements NetworkProvider
 		InputStream is = null;
 		try
 		{
-			is = ParserUtils.scrapeInputStream(uri);
+			is = ParserUtils.scrapeInputStream(uri, null, true, 3);
 			return queryConnections(uri, is);
 		}
 		catch (final XmlPullParserException x)
@@ -1415,7 +1415,7 @@ public abstract class AbstractEfaProvider implements NetworkProvider
 		InputStream is = null;
 		try
 		{
-			is = ParserUtils.scrapeInputStream(uri);
+			is = ParserUtils.scrapeInputStream(uri, null, true, 3);
 			return queryConnections(uri, is);
 		}
 		catch (final XmlPullParserException x)

AbstractHafasProvider.java

@@ -204,7 +204,7 @@ public abstract class AbstractHafasProvider implements NetworkProvider
 		InputStream is = null;
 		try
 		{
-			is = ParserUtils.scrapeInputStream(apiUri, wrap(request), 3);
+			is = ParserUtils.scrapeInputStream(apiUri, wrap(request), false, 3);
 
 			final List<Location> results = new ArrayList<Location>();
 
@@ -342,7 +342,7 @@ public abstract class AbstractHafasProvider implements NetworkProvider
 
 		try
 		{
-			is = ParserUtils.scrapeInputStream(apiUri, wrap(request), 3);
+			is = ParserUtils.scrapeInputStream(apiUri, wrap(request), false, 3);
 
 			final XmlPullParserFactory factory = XmlPullParserFactory.newInstance(System.getProperty(XmlPullParserFactory.PROPERTY_NAME), null);
 			final XmlPullParser pp = factory.newPullParser();
@@ -614,7 +614,7 @@ public abstract class AbstractHafasProvider implements NetworkProvider
 
 		try
 		{
-			is = ParserUtils.scrapeInputStream(apiUri, wrap(request), 3);
+			is = ParserUtils.scrapeInputStream(apiUri, wrap(request), false, 3);
 
 			final XmlPullParserFactory factory = XmlPullParserFactory.newInstance(System.getProperty(XmlPullParserFactory.PROPERTY_NAME), null);
 			final XmlPullParser pp = factory.newPullParser();

ParserUtils.java

@@ -92,9 +92,7 @@ public final class ParserUtils
 		connection.addRequestProperty("Cache-Control", "no-cache");
 
 		if (cookieHandling && stateCookie != null)
-		{
 			connection.addRequestProperty("Cookie", stateCookie);
-		}
 
 		if (request != null)
 		{
@@ -124,7 +122,7 @@ public final class ParserUtils
 		{
 			for (final String value : entry.getValue())
 			{
-				if (value.startsWith("NSC_"))
+				if (value.startsWith("NSC_") || value.startsWith("HASESSIONID"))
 				{
 					stateCookie = value.split(";", 2)[0];
 				}
@@ -169,10 +167,11 @@ public final class ParserUtils
 
 	public static final InputStream scrapeInputStream(final String url) throws IOException
 	{
-		return scrapeInputStream(url, null, 3);
+		return scrapeInputStream(url, null, false, 3);
 	}
 
-	public static final InputStream scrapeInputStream(final String url, final String postRequest, int tries) throws IOException
+	public static final InputStream scrapeInputStream(final String url, final String postRequest, final boolean cookieHandling, int tries)
+			throws IOException
 	{
 		while (true)
 		{
@@ -187,6 +186,9 @@ public final class ParserUtils
 			// workaround to disable Vodafone compression
 			connection.addRequestProperty("Cache-Control", "no-cache");
 
+			if (cookieHandling && stateCookie != null)
+				connection.addRequestProperty("Cookie", stateCookie);
+
 			if (postRequest != null)
 			{
 				connection.setRequestMethod("POST");
@@ -204,6 +206,24 @@ public final class ParserUtils
 			final String contentEncoding = connection.getContentEncoding();
 			final InputStream is = connection.getInputStream();
 
+			if (cookieHandling)
+			{
+				for (final Map.Entry<String, List<String>> entry : connection.getHeaderFields().entrySet())
+				{
+					if ("set-cookie".equalsIgnoreCase(entry.getKey()))
+					{
+						for (final String value : entry.getValue())
+						{
+							if (value.startsWith("NSC_") || value.startsWith("HASESSIONID"))
+							{
+								stateCookie = value.split(";", 2)[0];
+								System.out.println(stateCookie);
+							}
+						}
+					}
+				}
+			}
+
 			if ("gzip".equalsIgnoreCase(contentEncoding))
 				return new GZIPInputStream(is);
 
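Putting the ParserUtils pieces above together: when cookieHandling is enabled, the remembered cookie is sent via the Cookie request header, and any NSC_*/HASESSIONID value from the response's Set-Cookie headers replaces it. Below is a self-contained sketch of that round trip, without the retry loop, POST body and gzip handling of the real scrapeInputStream; the class name and URL are placeholders for illustration only.

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;
import java.util.Map;

public class SessionCookieDemo
{
    // Remembered across requests, like ParserUtils.stateCookie.
    private static String stateCookie;

    // Simplified version of the cookie handling added to scrapeInputStream:
    // replay the remembered cookie, then capture NSC_*/HASESSIONID from the response.
    public static InputStream openWithSessionCookie(final String url) throws IOException
    {
        final HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();

        if (stateCookie != null)
            connection.addRequestProperty("Cookie", stateCookie);

        final InputStream is = connection.getInputStream();

        for (final Map.Entry<String, List<String>> entry : connection.getHeaderFields().entrySet())
        {
            if ("set-cookie".equalsIgnoreCase(entry.getKey()))
            {
                for (final String value : entry.getValue())
                {
                    if (value.startsWith("NSC_") || value.startsWith("HASESSIONID"))
                        stateCookie = value.split(";", 2)[0];
                }
            }
        }

        return is;
    }

    public static void main(final String[] args) throws IOException
    {
        // Placeholder URL; any EFA endpoint that sets a HASESSIONID cookie would do.
        final String url = "http://example.com/";
        openWithSessionCookie(url).close(); // first call may capture a cookie
        openWithSessionCookie(url).close(); // second call replays it, if one was set
    }
}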

GvhProviderLiveTest.java

@@ -116,6 +116,8 @@ public class GvhProviderLiveTest
 		final QueryConnectionsResult result = provider.queryConnections(new Location(LocationType.STATION, 25000031, null, "Hannover Hauptbahnhof"),
 				null, new Location(LocationType.STATION, 25001141, null, "Hannover Bismarckstraße"), new Date(), true, ALL_PRODUCTS, WalkSpeed.FAST);
 		System.out.println(result);
+		final QueryConnectionsResult moreResult = provider.queryMoreConnections(result.context);
+		System.out.println(moreResult);
 	}
 
 	@Test