#include <fstream>
#include <algorithm>
#include <iostream>
#include <vector>
#include <set>
#include <cstring>
#include <queue>
#include <deque>
using namespace std;
#define For(i, st, dr) for(int i = st; i <= dr; ++i)
// Problem I/O streams ("santa" input/output files).
ifstream fin("santa.in");
ofstream fout("santa.out");
const int MAXN = 46000;
const int MAXCOMP = 16;    // NOTE(review): appears unused in this file — confirm before removing
const int inf = 0x3f3f3f3f; // "infinity" sentinel; also appears unused here
// n, m: vertex/edge counts; cs, cd: route source/destination; q: mandatory vertex.
// conex_s/conex_d/conex_q: id of the used biconnected component containing cs/cd/q.
int n, m, cs, cd, q, conex_s, conex_d, conex_q;
// graph: adjacency lists; conex: ordered ids of the components the cs->cd path
// crosses; vertex: cut vertices between consecutive used components.
vector<int> graph[MAXN], conex, vertex;
// sets[c]: the vertices of biconnected component c, filled by the DFS.
vector< vector<int> > sets;
/*
Input reading.
*/
void read() {
fin >> n >> m;
For (i, 1, m) {
int a, b;
fin >> a >> b;
graph[a].push_back(b);
graph[b].push_back(a);
}
fin >> cs >> cd >> q;
}
/*
Find the biconnected components (verified, believed fully working).
*/
// Tarjan DFS state: deep[v] = discovery time, how_high[v] = low-link
// (smallest discovery time reachable from v's subtree), p = DFS clock.
int how_high[MAXN], deep[MAXN], p;
// Stack of vertices belonging to the currently open component(s).
vector<int> stiva;
void add_biconex (int nod, int node) {
vector<int> cur;
while (stiva.back() != node) {
cur.push_back(stiva.back());
stiva.pop_back();
}
stiva.pop_back();
cur.push_back(node); cur.push_back(nod);
sets.push_back(cur);
}
/*
 * Registers `nod` as newly discovered: stamps its discovery depth,
 * initializes its low-link to the same value and pushes it on the stack.
 */
void dfs_new_nod (int nod) {
    deep[nod] = how_high[nod] = ++p;
    stiva.push_back(nod);
}
void make_dfs(int nod) {
dfs_new_nod(nod);
For (i, 0, (int)graph[nod].size() - 1) {
int node = graph[nod][i];
if (deep[node]) {
how_high[nod] = min (how_high[nod], deep[node]);
continue;
}
make_dfs(node);
how_high[nod] = min (how_high[nod], how_high[node]);
if (how_high[node] == deep[nod])
add_biconex(nod, node);
}
}
/*
 * Debug helper: dumps every biconnected component found so far to stderr.
 */
void test_dfs() {
    cerr << "\nTest DFS\n";
    for (int i = 0; i < (int)sets.size(); ++i) {
        for (int j = 0; j < (int)sets[i].size(); ++j)
            cerr << sets[i][j] << " ";
        cerr << "\n";
    }
    cerr << "\n";
}
/*
Determine which biconnected components the route actually uses
(believed working, needs verification).
*/
// used: visitation marks shared by get_drum and get_hamilton (reset by init_back);
// conex_chance[c]: number of cs->cd path vertices lying in component c;
// in_set[c]: component c already appended to `conex`.
int used[MAXN], conex_chance[MAXN], in_set[MAXN];
// drum: the cs->cd path; later reused to hold Santa's final route.
vector<int> drum;
// vertex_to_set[v]: ids of the biconnected components that contain v.
vector<int> vertex_to_set[MAXN];
/*
 * Depth-first search that records one arbitrary path from `nod` to the
 * destination `cd` into `drum`. Returns 1 on success, 0 otherwise.
 */
bool get_drum(int nod) {
    used[nod] = 1;
    drum.push_back(nod);
    if (nod == cd)
        return 1;
    for (int i = 0; i < (int)graph[nod].size(); ++i) {
        const int next = graph[nod][i];
        if (!used[next] && get_drum(next))
            return 1;
    }
    // Dead end: remove `nod` from the partial path.
    drum.pop_back();
    return 0;
}
void det_comp(int nod, int &conex) {
For (i, 0, (int)vertex_to_set[nod].size() - 1)
if (conex_chance[vertex_to_set[nod][i]] > 1) {
conex = vertex_to_set[nod][i];
return;
}
}
bool make_vertex_set() {
get_drum(cs);
For (i, 0, (int)sets.size() - 1) {
For (j, 0, (int)sets[i].size() - 1)
vertex_to_set[sets[i][j]].push_back(i);
}
For (i, 0, (int)drum.size() - 1) {
int nod = drum[i];
For (j, 0, (int)vertex_to_set[nod].size() - 1)
++conex_chance[vertex_to_set[nod][j]];
}
For (i, 0, (int)drum.size() - 1) {
int nod = drum[i];
if (vertex_to_set[nod].size() == 1) {
int comp = vertex_to_set[nod][0];
if (!in_set[comp]) {
conex.push_back(comp);
in_set[comp] = 1;
}
}
else {
For (i, 0, (int)vertex_to_set[nod].size() - 1) {
int comp = vertex_to_set[nod][i];
if (conex_chance[comp] > 1)
if (!in_set[comp]){
conex.push_back(comp);
vertex.push_back(nod);
in_set[comp] = 1;
}
}
}
}
det_comp(cs, conex_s); det_comp(cd, conex_d); det_comp(q, conex_q);
if (conex_q != conex_s && conex_q != conex_d) return 0;
return 1;
}
/*
 * Debug helper: prints the chain of used components and the cut
 * vertices between them to stderr.
 */
void test_vertex_set() {
    cerr << "\nTest Vertex Set\n";
    for (int i = 0; i < (int)conex.size(); ++i)
        cerr << conex[i] << " ";
    cerr << "\n";
    for (int i = 0; i < (int)vertex.size(); ++i)
        cerr << vertex[i] << " ";
    cerr << "\n";
}
/*
Build Santa's route (work in progress).
TODO(optimization): add a cleanup routine that prunes already-unreachable
nodes from the graph so they are not traversed again.
*/
// Vertices of the Hamiltonian path currently being built inside one component.
vector<int> coada;
bool get_hamilton(int &comp, int nod, int &dest) {
coada.push_back(nod);
used[nod] = 1;
cerr << nod << "\n";
if (nod == dest) {
if (coada.size() == sets[comp].size())
return 1;
used[nod] = 0;
coada.pop_back();
return 0;
}
For (i, 0, (int)graph[nod].size() - 1) {
int node = graph[nod][i];
if (!used[node] && binary_search(sets[comp].begin(), sets[comp].end(), node))
if (get_hamilton(comp, node, dest)) return 1;
}
used[nod] = 0;
coada.pop_back();
}
/*
 * Resets the backtracking state: clears the visitation marks and the
 * partial path before searching a new component.
 */
void init_back() {
    memset(used, 0, sizeof(used));
    coada.clear();
}
/*
Builds Santa's final route into `drum`: orients the component chain so
that q lies in the first component, then finds a Hamiltonian path through
each component between consecutive cut vertices.
Returns 0 as soon as one component admits no such path, 1 on success.
*/
bool make_drum_santa() {
// If q sits in cd's component, traverse the chain in the other direction.
if (conex_q == conex_d) {
reverse(conex.begin(), conex.end());
reverse(vertex.begin(), vertex.end());
swap(cs, cd);
}
// Route endpoints: start at q, end at the (possibly swapped) cd.
vertex.insert(vertex.begin(), q);
vertex.insert(vertex.end(), cd);
drum.clear();
For (i, 0, (int)conex.size() - 1) {
// Sorting lets get_hamilton use binary_search to stay inside the component.
init_back(); sort (sets[conex[i]].begin(), sets[conex[i]].end());
if (!get_hamilton(conex[i], vertex[i], vertex[i + 1])) return 0;
// Append the leg without its last vertex (shared with the next leg).
drum.insert(drum.end(), coada.begin(), coada.end() - 1);
}
drum.push_back(cd);
return 1;
}
/*
 * Writes the route length followed by the vertices of Santa's route.
 */
void print() {
    fout << drum.size() << "\n";
    for (size_t i = 0; i < drum.size(); ++i)
        fout << drum[i] << " ";
}
/*
 * Driver: read the graph, decompose it into biconnected components,
 * select the component chain of the cs->cd path and print Santa's route,
 * or "No chance" when no valid route exists.
 */
int main() {
    read();
    make_dfs(cs);
    test_dfs();
    if (make_vertex_set()) {
        test_vertex_set();
        if (make_drum_santa()) {
            print();
            return 0;
        }
    }
    fout << "No chance\n";
    return 0;
}