From 68242f6579a12380711f46a40b587dfd84a18786 Mon Sep 17 00:00:00 2001
From: sheldonabrown
Date: Mon, 10 Jun 2024 13:41:36 -0400
Subject: [PATCH] MOTP-2144 allow for differing patterns in UpdateCalendarDatesForDuplicateTrips

---
 .../UpdateCalendarDatesForDuplicateTrips.java | 488 +++++++++---------
 .../onebusaway/gtfs/model/DuplicateTrips.java |  89 ----
 2 files changed, 236 insertions(+), 341 deletions(-)
 delete mode 100644 onebusaway-gtfs/src/main/java/org/onebusaway/gtfs/model/DuplicateTrips.java

diff --git a/onebusaway-gtfs-transformer/src/main/java/org/onebusaway/gtfs_transformer/impl/UpdateCalendarDatesForDuplicateTrips.java b/onebusaway-gtfs-transformer/src/main/java/org/onebusaway/gtfs_transformer/impl/UpdateCalendarDatesForDuplicateTrips.java
index dea177d0b..11cefd890 100644
--- a/onebusaway-gtfs-transformer/src/main/java/org/onebusaway/gtfs_transformer/impl/UpdateCalendarDatesForDuplicateTrips.java
+++ b/onebusaway-gtfs-transformer/src/main/java/org/onebusaway/gtfs_transformer/impl/UpdateCalendarDatesForDuplicateTrips.java
@@ -17,7 +17,6 @@
 package org.onebusaway.gtfs_transformer.impl;
 
 import org.onebusaway.gtfs.model.*;
-import org.onebusaway.gtfs.model.calendar.ServiceDate;
 import org.onebusaway.gtfs.services.GtfsMutableRelationalDao;
 import org.onebusaway.gtfs_transformer.services.GtfsTransformStrategy;
 import org.onebusaway.gtfs_transformer.services.TransformContext;
@@ -25,7 +24,15 @@
 import org.slf4j.LoggerFactory;
 
 import java.util.*;
+import java.util.stream.Collectors;
 
+/**
+ * Map ATIS trip_ids to mta_trip_ids while de-duplicating.
+ * Tag each "duplicate" trip with an ATIS id to force it unique if the stopping pattern differs.
+ * Otherwise create new service_ids representing the service of the duplicates,
+ * adding to an exemplar trip and deleting the duplicates.
+ *
+ */
 public class UpdateCalendarDatesForDuplicateTrips implements GtfsTransformStrategy {
 
     private final Logger _log = LoggerFactory.getLogger(UpdateCalendarDatesForDuplicateTrips.class);
@@ -35,307 +42,284 @@ public String getName() {
         return this.getClass().getSimpleName();
     }
 
-    @Override
     public void run(TransformContext context, GtfsMutableRelationalDao dao) {
-        RemoveEntityLibrary removeEntityLibrary = new RemoveEntityLibrary();
-        String agency = dao.getAllTrips().iterator().next().getId().getAgencyId();
+        if (dao == null || dao.getAllTrips().isEmpty()) {
+            throw new IllegalStateException("nothing to do!");
+        }
+        String calendarAgencyId = dao.getAllTrips().iterator().next().getId().getAgencyId();
+        DuplicateState state = new DuplicateState(dao, calendarAgencyId);
         //map of each mta_trip_id and list of trips
-        HashMap<String, ArrayList<Trip>> tripsMap = new HashMap<>();
-        //List of DuplicateTrips
-        ArrayList<DuplicateTrips> duplicateTripData = new ArrayList<>();
+        HashMap<String, ArrayList<Trip>> tripsByMtaTripId = buildTripMap(state, dao);
 
-        //they are only duplicates if the stop times match as well.
- //if the stop times match, then we can move forward with merging trips - //if not, then we can't merge and we leave the trips alone + // we only use this for informational logging, we don't actually compare to reference + GtfsMutableRelationalDao reference = (GtfsMutableRelationalDao) context.getReferenceReader().getEntityStore(); - //set all the trips that are duplicates based on mta_trip_id - int mtaIdNull = 0; - for (Trip trip : dao.getAllTrips()) { - if (trip.getMtaTripId() != null) { - if (tripsMap.containsKey(trip.getMtaTripId())) { - ArrayList trips = tripsMap.get(trip.getMtaTripId()); - trips.add(trip); - tripsMap.put(trip.getMtaTripId(), trips); - } else { - ArrayList trips = new ArrayList<>(); - trips.add(trip); - tripsMap.put(trip.getMtaTripId(), trips); - } - } else { - _log.info("trip {} mta_trip_id is null", trip.getId()); - mtaIdNull++; - } + HashMap referenceTripsByTripIdByTripId = new HashMap<>(); + for (Trip trip : reference.getAllTrips()) { + referenceTripsByTripIdByTripId.put(trip.getId().getId(), trip); } - GtfsMutableRelationalDao reference = (GtfsMutableRelationalDao) context.getReferenceReader().getEntityStore(); + logDuplicates(tripsByMtaTripId, referenceTripsByTripIdByTripId); - HashMap referenceTrips = new HashMap<>(); - for (Trip trip : reference.getAllTrips()) { - referenceTrips.put(trip.getId().getId(), trip); + _log.info("Incoming Routes: {} Trips: {} Stops: {} Stop times: {} CalDatess: {} ", dao.getAllRoutes().size(), dao.getAllTrips().size(), dao.getAllStops().size(), dao.getAllStopTimes().size(), dao.getAllCalendarDates().size()); + + // perform the computations, but application of them is delayed till later + update(dao, state, tripsByMtaTripId); + // apply the changes + state.apply(); + + _log.info("Outgoing Routes: {} Trips: {} Stops: {} Stop times: {} CalDates: {} ", dao.getAllRoutes().size(), dao.getAllTrips().size(), dao.getAllStops().size(), dao.getAllStopTimes().size(), dao.getAllCalendarDates().size()); + _log.info("deleted trips: {} duplicate trip Ids: {} null ids {}", state.deletedTripCounter, state.duplicateTripIdCounter, state.mtaIdNullCounter); + } + + private void update(GtfsMutableRelationalDao dao, DuplicateState state, HashMap> tripsByMtaTripId) { + for (Map.Entry> entry : tripsByMtaTripId.entrySet()) { + update(state, entry.getKey(), entry.getValue()); + deDuplicate(dao, state, entry.getKey(), entry.getValue()); } + } - //this is just for logging if dups are in reference, delete when ready - /* Iterator entries2 = tripsMap.entrySet().iterator(); - while (entries2.hasNext()) { - HashMap.Entry entry = (HashMap.Entry) entries2.next(); - ArrayList trips = (ArrayList) entry.getValue(); - if (trips.size() > 1) { - //these are duplicates - if (referenceTrips.containsKey(entry.getKey())) { - //_log.info("Duplicate trip id {} is in reference", entry.getKey()); - } - } + private void update(DuplicateState state, String mtaTripId, ArrayList duplicateTrips) { + for (Trip duplicateTrip : duplicateTrips) { + String agencyId = duplicateTrip.getId().getAgencyId(); + String modifier = duplicateTrip.getId().getId(); + // if we change the id here we can't decide to delete it later + // instead map the change and do it later + state.addTripToTrack(duplicateTrip.getId(), new AgencyAndId(agencyId, mtaTripId + "-dup-" + modifier)); } -*/ - int orStopTimes = dao.getAllStopTimes().size(); - _log.info("Routes: {} Trips: {} Stops: {} Stop times: {} CalDatess: {} ", dao.getAllRoutes().size(), dao.getAllTrips().size(), dao.getAllStops().size(), 
dao.getAllStopTimes().size(), dao.getAllCalendarDates().size()); + } + + private void deDuplicate(GtfsMutableRelationalDao dao, DuplicateState state, String mtaTripId, ArrayList duplicateTrips) { + Map> patternHashToTripId = new HashMap<>(); + for (Trip duplicateTrip : duplicateTrips) { + String patternHash = hashPattern(dao, duplicateTrip); + if (!patternHashToTripId.containsKey(patternHash)) { + patternHashToTripId.put(patternHash, new ArrayList()); + } + patternHashToTripId.get(patternHash).add(duplicateTrip); + } - int countUnique = 0; - int countCombine = 0; - int countDoNothing = 0; - int countToday = 0; + deDuplicate(dao, state, mtaTripId, patternHashToTripId); + } - Iterator entries = tripsMap.entrySet().iterator(); - int service_id = getNextServiceId(dao); - while (entries.hasNext()) { - HashMap.Entry entry = (HashMap.Entry) entries.next(); - ArrayList trips = (ArrayList) entry.getValue(); + private void deDuplicate(GtfsMutableRelationalDao dao, DuplicateState state, String mtaTripId, Map> patternHashToTripId) { + // each pattern only needs one representative trip -- we don't care which -- and then multiple calendar entries + for (List trips : patternHashToTripId.values()) { if (trips.size() > 1) { - Boolean equals = true; - //do all the trips have identical stops? If yes, proceed and update calendar dates and stop times and the trip_id - //If not, leave the trip alone. Do nothing. - trip_loop: - for (int i = 0; i < trips.size(); i++) { - for (int j = i+1; j < trips.size(); j++) { - //if (!dao.getStopTimesForTrip(trips.get(i)).equals(dao.getStopTimesForTrip(trips.get(j))) ) { - //they won't be equal because a stop time has a trip id and the trip ids are different - if (!stopTimesEqual(dao.getStopTimesForTrip(trips.get(i)), dao.getStopTimesForTrip(trips.get(j)))) { - //_log.info("The stop times for {} and {} are not equal", trips.get(i).getId().getId(), trips.get(j).getId().getId()); - equals = false; - //so at this point the stop times don't equal. Do I check if its just one or just throw the whole thing out? - //For now if one doesn't match then none do and I'm going to ignore. - countDoNothing = countDoNothing + trips.size(); - - //check if any of the trips are today. If one of them is, then copy over the mta_id and ignore the duplicates - //so at least one trip will get the right id - if (checkForServiceToday(trips, dao)) { - countToday++; - } - break trip_loop; - } - } - } - if (equals) { - //_log.info("EQUALS!"); - //for each mta_id that is a duplicate, we need to ultimately delete those duplicates - //First, get all the corresponding serviceDates for all the trips with that mta_id, then create new service ids - //and add entries with that new id that correspond to all the service dates for all the trips - DuplicateTrips dup = new DuplicateTrips((String) entry.getKey(), Integer.toString(service_id), trips); - duplicateTripData.add(dup); - service_id++; - countCombine = countCombine + trips.size(); - } - } - else { - //trips.size is not > 1 so these trips are unique. 
Copy over mta_trip_id - Trip trip = trips.get(0); - trip.setId(new AgencyAndId(trip.getId().getAgencyId(), trip.getMtaTripId())); - countUnique++; + Trip exemplar = trips.remove(0); + deDuplicateTrip(dao, state, exemplar, trips); } } - _log.info("Mta_trip_ids: null {}, unique {}, do nothing {}, today {}, combine {}, total {}", mtaIdNull, countUnique, countDoNothing, countToday, countCombine, mtaIdNull+countUnique+countDoNothing+countCombine); - - //now we have a list of DuplicateTrips and we need to fill in the calendar dates - for (DuplicateTrips dts : duplicateTripData) { - for (Trip trip : dts.getTrips()) { - //for each trip, get the calendar dates - for (ServiceCalendarDate calDate : dao.getCalendarDatesForServiceId(trip.getServiceId())) { - dts.addServiceDate(calDate); + } + + private void deDuplicateTrip(GtfsMutableRelationalDao dao, DuplicateState state, Trip exemplar, List tripsToRemove) { + Set serviceIds = tripsToRemove.stream().map(l->l.getServiceId()).collect(Collectors.toSet()); + addServiceForTrip(dao, state, exemplar, serviceIds); + deleteTrips(dao, state, tripsToRemove); + } + + private void deleteTrips(GtfsMutableRelationalDao dao, DuplicateState state, List tripsToRemove) { + state.removeTrip(tripsToRemove); + } + + private void addServiceForTrip(GtfsMutableRelationalDao dao, DuplicateState state, Trip exemplar, Set serviceIds) { + state.addTrip(exemplar, serviceIds); + } + + private String hashPattern(GtfsMutableRelationalDao dao, Trip duplicateTrip) { + StringBuffer sb = new StringBuffer(); + for (StopTime stopTime : dao.getStopTimesForTrip(duplicateTrip)) { + sb.append(stopTime.getStop().getId().getId()); + sb.append(":"); + sb.append(stopTime.getArrivalTime()); + sb.append(":"); + sb.append(stopTime.getDepartureTime()); + sb.append(":"); + } + if (sb.length() == 0) + return "empty"; // this is technically an error but support it just in case + return sb.substring(0, sb.length() - 1); + } + + // index ATIS trips by mta_trip_id + private HashMap> buildTripMap(DuplicateState state, GtfsMutableRelationalDao dao) { + HashMap> tripsByMtaTripId = new HashMap<>(); + + for (Trip trip : dao.getAllTrips()) { + if (trip.getMtaTripId() != null) { + if (!tripsByMtaTripId.containsKey(trip.getMtaTripId())) { + tripsByMtaTripId.put(trip.getMtaTripId(), new ArrayList<>()); } + tripsByMtaTripId.get(trip.getMtaTripId()).add(trip); + } else { + _log.info("trip {} mta_trip_id is null", trip.getId()); + state.mtaIdNullCounter++; } } - //now we have a list of DuplicateTrips and their calendar dates - //a lot of the DuplicateTrips will have the same list of calendar dates. Don't create duplicate calendar entries unnecessarily - - //Create a unique list of calendar dates to add - HashMap> dateMap = new HashMap<>(); - - //for each duplicateTrips in the list, get the list of caldate entries - //if the caldate entries is in the dateMap, change the Service Id for the duplicate trip - //if its not in there, then add it - int newDates = 0; - for (DuplicateTrips dts : duplicateTripData) { - //first time through, populate dateMap - if (dateMap.isEmpty()) { - dateMap.put(dts.getServiceId(), dts.getDates()); - } else { - boolean addNewDateMap = true; - for (HashMap.Entry> calDate : dateMap.entrySet()) { - ArrayList scds = (ArrayList) calDate.getValue(); - //scds is a unique list of service calendar dates in the map - if (new HashSet(dts.getDates()).equals(new HashSet(scds))) { - //we already have a list of the same dates. 
Re-use the service id - addNewDateMap = false; - //set the service date id in DuplicateTrips to be this one - dts.setServiceId(calDate.getKey()); - break; + + return tripsByMtaTripId; + } + + private void logDuplicates(HashMap> tripsByMtaTripId, HashMap referenceTripsByTripId) { + if (_log.isDebugEnabled()) { + //this is just for logging if dups are in reference + Iterator entries2 = tripsByMtaTripId.entrySet().iterator(); + while (entries2.hasNext()) { + HashMap.Entry entry = (HashMap.Entry) entries2.next(); + ArrayList trips = (ArrayList) entry.getValue(); + if (trips.size() > 1) { + //these are duplicates + if (referenceTripsByTripId.containsKey(entry.getKey())) { + _log.info("Duplicate trip id {} is in reference", entry.getKey()); } } - //there was no match, update the date map and add new serviceId - if (addNewDateMap) { - //dates don't exist, add new entry to date map and add service id - dateMap.put(dts.getServiceId(), dts.getDates()); - newDates = newDates + dts.getDates().size(); - } } } + } - int serviceIds = 0; - //Now the list is compete, add the new service id and dates - for (HashMap.Entry> calDateId : dateMap.entrySet()) { - AgencyAndId newServiceId = new AgencyAndId(agency, calDateId.getKey()); - ArrayList scds = calDateId.getValue(); - //need a list of the service cal dates, iterate, add - for (ServiceCalendarDate calDate : scds) { - serviceIds++; - //for each date, create a new calendar_dates entry with the new service_id - ServiceCalendarDate newScd = new ServiceCalendarDate(); - newScd.setServiceId(newServiceId); - newScd.setDate(calDate.getDate()); - newScd.setExceptionType(calDate.getExceptionType()); - dao.saveOrUpdateEntity(newScd); - } + + /** + * Internal state of the algorithm. + */ + private static class DuplicateState { + private int mtaIdNullCounter = 0; + private int serviceIdCounter = 0; + private int duplicateTripIdCounter = 0; + private int deletedTripCounter = 0; + private Map, List> tripsByServiceIds = new HashMap<>(); + private List tripsToRemove = new ArrayList<>(); + private GtfsMutableRelationalDao dao; + private String calendarAgencyId; + private Map atisToMtaTripId = new HashMap<>(); + + public DuplicateState(GtfsMutableRelationalDao dao, String calendarAgencyId) { + this.dao = dao; + this.calendarAgencyId = calendarAgencyId; + String largestServiceId = Collections.max(dao.getAllCalendarDates().stream().map(l->l.getServiceId().getId()).collect(Collectors.toSet())); + // here we make an assumption that service ids are numeric + serviceIdCounter = Integer.parseInt(largestServiceId) + 1; } - //trips updated, array of mta_ids that we've updated - HashMap tripsUpdated = new HashMap<>(); - ArrayList tripsToRemove = new ArrayList<>(); - - //update the trips with the new service_id - for (DuplicateTrips dts : duplicateTripData) { - AgencyAndId newServiceId = new AgencyAndId(agency, dts.getServiceId()); - for (Trip trip : dts.getTrips()) { - //for each trip, set the new service id - trip.setServiceId(newServiceId); - //now the trip_id has to be set with the mta_trip_id - //we have to have one as the one to keep and mark the others for deletion - //and then there needs to be a seperate method for all the deletions. 
- if (trip.getMtaTripId() != null) { - if (tripsUpdated.containsKey(trip.getMtaTripId())) { - tripsToRemove.add(trip); - } else { - tripsUpdated.put(trip.getMtaTripId(), trip); - trip.setId(new AgencyAndId(trip.getId().getAgencyId(), trip.getMtaTripId())); - } - } + public void addTrip(Trip exemplar, Set serviceIds) { + if (!tripsByServiceIds.containsKey(serviceIds)) { + tripsByServiceIds.put(serviceIds, new ArrayList<>()); } + tripsByServiceIds.get(serviceIds).add(exemplar); } - int stopsTimesToRemove = 0; - int remove = 0; - for (Trip tripToRemove : tripsToRemove) { - stopsTimesToRemove = stopsTimesToRemove + dao.getStopTimesForTrip(tripToRemove).size(); - removeEntityLibrary.removeTrip(dao, tripToRemove); - remove++; + public void removeTrip(List incoming) { + tripsToRemove.addAll(incoming); } - _log.info("Added Service Cal dates: {}, Removed trips: {}, Removed stoptimes: {}", serviceIds, remove, stopsTimesToRemove); - _log.info("Routes: {} Trips: {} Stops: {} Stop times: {} CalDates: {} ", dao.getAllRoutes().size(), dao.getAllTrips().size(), dao.getAllStops().size(), dao.getAllStopTimes().size(), dao.getAllCalendarDates().size()); - } - - private boolean checkForServiceToday(ArrayList trips, GtfsMutableRelationalDao dao) { - //if the stop times are not equal, check and see if any of the trips are running today. - //if the trip is running today, then copy over the id for this one trip, - //we'll ignore the rest of the trips and break the trip loop. - Date today = removeTime(new Date()); - if (trips.size() > 2) { - _log.info("There are more than two matches for this trip id {}", trips.get(0).getMtaTripId()); + public void apply() { + generateServiceIds(); + deleteTrips(); + applyNewTripIds(); } - for (Trip trip : trips) { - for (ServiceCalendarDate calDate : dao.getCalendarDatesForServiceId(trip.getServiceId())) { - Date date = constructDate(calDate.getDate()); - if (calDate.getExceptionType() == 1 && date.equals(today)) { - //_log.info("Copying over id for {} {}", trip.getId(), trip.getMtaTripId()); - //trip is today, copy of the mta_id for this one and quit - trip.setId(new AgencyAndId(trip.getId().getAgencyId(), trip.getMtaTripId())); - return true; + + private void applyNewTripIds() { + Map mtaTripIdCounts = new HashMap<>(); + for (Map.Entry entry : atisToMtaTripId.entrySet()) { + AgencyAndId atisTripId = entry.getKey(); + AgencyAndId mtaTripId = entry.getValue(); + AgencyAndId rawMtaTripId = removeTag(mtaTripId); + if (!mtaTripIdCounts.containsKey(rawMtaTripId)) { + mtaTripIdCounts.put(rawMtaTripId, 1); + } else { + mtaTripIdCounts.put(rawMtaTripId, mtaTripIdCounts.get(rawMtaTripId)+1); } } - } - return false; - } - private int getNextServiceId(GtfsMutableRelationalDao dao) { - ArrayList idList = new ArrayList<>(); - for (ServiceCalendarDate svcDate : dao.getAllCalendarDates()) { - if (isInt(svcDate.getServiceId().getId())) { - idList.add(Integer.parseInt(svcDate.getServiceId().getId())); + + for (Map.Entry agencyAndIdAgencyAndIdEntry : atisToMtaTripId.entrySet()) { + AgencyAndId atisTripId = agencyAndIdAgencyAndIdEntry.getKey(); + AgencyAndId mtaTripId = agencyAndIdAgencyAndIdEntry.getValue(); + AgencyAndId rawMtaTripId = removeTag(mtaTripId); + int occurrenceCount = mtaTripIdCounts.get(rawMtaTripId); + Trip toModify = dao.getTripForId(atisTripId); + if (toModify != null) { + if (occurrenceCount > 1) { + // it is a duplicate + incDuplicateCount(); + toModify.setId(mtaTripId); + } else { + // we've pruned it to be unique, remove the dup tagging + 
toModify.setId(removeTag(mtaTripId)); + } + } else { + // the trip has already been deleted, nothing to do + System.out.println("non-existent trip " + atisTripId + "/" + mtaTripId); + } + } - } - return Collections.max(idList) + 1; - } - private boolean isInt(String str) { - if (str == null) { - return false; } - int length = str.length(); - if (length == 0) { - return false; + + private void incDuplicateCount() { + duplicateTripIdCounter++; } - for (int i = 0; i < length; i++) { - char c = str.charAt(i); - if (c < '0' || c > '9') { - return false; + + private AgencyAndId removeTag(AgencyAndId mtaTripId) { + int pos = mtaTripId.getId().lastIndexOf("-dup-"); + if (pos > 1) { + return new AgencyAndId(mtaTripId.getAgencyId(), mtaTripId.getId().substring(0, pos)); } + return mtaTripId; } - return true; - } - private boolean stopTimesEqual(List s1, List s2) { - if (s1.size() != s2.size()) { - //_log.info("Not equal on size {} {}", s1.size(), s2.size()); - return false; + private void deleteTrips() { + RemoveEntityLibrary removeEntityLibrary = new RemoveEntityLibrary(); + + for (Trip tripToRemove : tripsToRemove) { + deletedTripCounter++; + removeEntityLibrary.removeTrip(dao, tripToRemove); + atisToMtaTripId.remove(tripToRemove.getId()); + } } - int index = 0; - for (int i = 0; i < s1.size(); i++) { - if(!s1.get(i).getStop().equals(s2.get(i).getStop())) { - //_log.info("Stops {} {}", s1.get(i).getStop(), s2.get(i).getStop()); - return false; + + private void generateServiceIds() { + for (Map.Entry, List> tripsBySet : tripsByServiceIds.entrySet()) { + Set calendarIds = tripsBySet.getKey(); + List trips = tripsBySet.getValue(); + AgencyAndId newServiceId = generateServiceId(calendarIds); + for (Trip trip : trips) { + trip.setServiceId(newServiceId); + } } - if(s1.get(i).getDepartureTime() != s2.get(i).getDepartureTime()) { - //_log.info("Dep time {} {}", s1.get(i).getDepartureTime(), s2.get(i).getDepartureTime()); - return false; + + } + + private AgencyAndId generateServiceId(Set calendarIds) { + List dates = new ArrayList<>(); + for (AgencyAndId calendarId : calendarIds) { + List calendarDatesForServiceId = dao.getCalendarDatesForServiceId(calendarId); + dates.addAll(calendarDatesForServiceId); } - if(s1.get(i).getArrivalTime() != s2.get(i).getArrivalTime()) { - //_log.info("Arr time {} {}", s1.get(i).getArrivalTime(), s2.get(i).getArrivalTime()); - return false; + + AgencyAndId newServiceId = generateServiceId(); + + for (ServiceCalendarDate calDate : dates) { + ServiceCalendarDate newDate = new ServiceCalendarDate(); + newDate.setServiceId(newServiceId); + newDate.setDate(calDate.getDate()); + newDate.setExceptionType(calDate.getExceptionType()); + dao.saveOrUpdateEntity(newDate); } + + return newServiceId; } - return true; - } - private Date constructDate(ServiceDate date) { - Calendar calendar = Calendar.getInstance(); - calendar.set(Calendar.YEAR, date.getYear()); - calendar.set(Calendar.MONTH, date.getMonth()-1); - calendar.set(Calendar.DATE, date.getDay()); - Date date1 = calendar.getTime(); - date1 = removeTime(date1); - return date1; - } + private AgencyAndId generateServiceId() { + serviceIdCounter++; + return new AgencyAndId(calendarAgencyId, String.valueOf(serviceIdCounter)); + } - private Date removeTime(Date date) { - Calendar calendar = Calendar.getInstance(); - calendar.setTime(date); - calendar.set(Calendar.HOUR_OF_DAY, 0); - calendar.set(Calendar.MINUTE, 0); - calendar.set(Calendar.SECOND, 0); - calendar.set(Calendar.MILLISECOND, 0); - date = calendar.getTime(); - return date; 
+        public void addTripToTrack(AgencyAndId atisId, AgencyAndId mtaTripId) {
+            atisToMtaTripId.put(atisId, mtaTripId);
+        }
     }
+}
diff --git a/onebusaway-gtfs/src/main/java/org/onebusaway/gtfs/model/DuplicateTrips.java b/onebusaway-gtfs/src/main/java/org/onebusaway/gtfs/model/DuplicateTrips.java
deleted file mode 100644
index ae5f82950..000000000
--- a/onebusaway-gtfs/src/main/java/org/onebusaway/gtfs/model/DuplicateTrips.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
- * Copyright (C) 2018 Cambridge Systematics, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.onebusaway.gtfs.model;
-
-import java.util.ArrayList;
-
-public class DuplicateTrips {
-
-    private String id;
-
-    private String serviceId;
-
-    private ArrayList<Trip> trips = new ArrayList<Trip>();
-
-    private ArrayList<ServiceCalendarDate> dates = new ArrayList<ServiceCalendarDate>();
-
-    public DuplicateTrips() {
-
-    }
-
-    public DuplicateTrips(String id, String svcId, ArrayList<Trip> trips) {
-        this.setId(id);
-        this.setServiceId(svcId);
-        this.setTrips(trips);
-    }
-
-    public DuplicateTrips(DuplicateTrips dts) {
-        this.setId(dts.getId());
-        this.setServiceId(dts.getServiceId());
-        this.setTrips(dts.getTrips());
-        this.setDates(dts.getDates());
-    }
-
-    public String getId() {
-        return id;
-    }
-
-    public void setId(String id) {
-        this.id = id;
-    }
-
-    public String getServiceId() {
-        return serviceId;
-    }
-
-    public void setServiceId(String serviceId) {
-        this.serviceId = serviceId;
-    }
-
-    public ArrayList<Trip> getTrips() {
-        return trips;
-    }
-
-    public void setTrips(ArrayList<Trip> trips) {
-        this.trips = trips;
-    }
-
-    public void addTrip(Trip trip) {
-        this.trips.add(trip);
-    }
-
-    public ArrayList<ServiceCalendarDate> getDates() {
-        return dates;
-    }
-
-    public void setDates(ArrayList<ServiceCalendarDate> dates) {
-        this.dates = dates;
-    }
-
-    public void addServiceDate(ServiceCalendarDate date) {
-        this.dates.add(date);
-    }
-
-
-}
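
Note on the approach (illustrative sketch, not part of the patch): the transform above groups the ATIS trips that share an mta_trip_id by a hash of their stop pattern (stop id, arrival, departure per stop time), keeps one exemplar trip per pattern, merges the service ids of the discarded duplicates onto that exemplar under a newly generated service_id, and tags trips whose patterns differ with a "-dup-" suffix so they stay unique. The standalone Java sketch below (requires Java 16+ for records) shows only the grouping and service-id merging step; SimpleTrip, SimpleStopTime and PatternDedupSketch are hypothetical stand-ins for the onebusaway-gtfs model classes, and the real strategy mutates the GtfsMutableRelationalDao rather than returning a map.

import java.util.*;
import java.util.stream.Collectors;

public class PatternDedupSketch {

    // Hypothetical stand-ins for the GTFS model classes used by the transform.
    record SimpleStopTime(String stopId, int arrivalTime, int departureTime) {}
    record SimpleTrip(String atisTripId, String serviceId, List<SimpleStopTime> stopTimes) {}

    // Same idea as hashPattern() above: stopId:arrival:departure per stop, ':'-joined.
    static String hashPattern(SimpleTrip trip) {
        if (trip.stopTimes().isEmpty()) {
            return "empty";
        }
        return trip.stopTimes().stream()
                .map(st -> st.stopId() + ":" + st.arrivalTime() + ":" + st.departureTime())
                .collect(Collectors.joining(":"));
    }

    // For the trips sharing one mta_trip_id: keep one exemplar per stop pattern and
    // collect the service ids of the duplicates that would be deleted and merged into it.
    static Map<SimpleTrip, Set<String>> deDuplicate(List<SimpleTrip> tripsForOneMtaTripId) {
        Map<String, List<SimpleTrip>> byPattern = new LinkedHashMap<>();
        for (SimpleTrip trip : tripsForOneMtaTripId) {
            byPattern.computeIfAbsent(hashPattern(trip), k -> new ArrayList<>()).add(trip);
        }
        Map<SimpleTrip, Set<String>> exemplarToMergedServiceIds = new LinkedHashMap<>();
        for (List<SimpleTrip> samePattern : byPattern.values()) {
            SimpleTrip exemplar = samePattern.get(0);
            Set<String> mergedServiceIds = samePattern.stream()
                    .map(SimpleTrip::serviceId)
                    .collect(Collectors.toSet());
            exemplarToMergedServiceIds.put(exemplar, mergedServiceIds);
        }
        return exemplarToMergedServiceIds;
    }

    public static void main(String[] args) {
        List<SimpleStopTime> patternA = List.of(
                new SimpleStopTime("S1", 100, 100), new SimpleStopTime("S2", 200, 200));
        List<SimpleStopTime> patternB = List.of(
                new SimpleStopTime("S1", 100, 100), new SimpleStopTime("S3", 300, 300));
        List<SimpleTrip> duplicates = List.of(
                new SimpleTrip("atis-1", "svc-1", patternA),
                new SimpleTrip("atis-2", "svc-2", patternA),  // same pattern: merged into atis-1
                new SimpleTrip("atis-3", "svc-3", patternB)); // different pattern: kept separately
        // Two exemplars: atis-1 carries {svc-1, svc-2}; atis-3 carries {svc-3}.
        System.out.println(deDuplicate(duplicates));
    }
}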